forked from mirror/nheko
parent
c973fd759b
commit
7a206441c8
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,74 @@ |
||||
#include <QHBoxLayout> |
||||
#include <QIcon> |
||||
#include <QLabel> |
||||
#include <QString> |
||||
|
||||
#include "ActiveCallBar.h" |
||||
#include "WebRTCSession.h" |
||||
#include "ui/FlatButton.h" |
||||
|
||||
// Horizontal bar shown above the timeline during a voice call: displays the
// call party on the left and a round mute/unmute button on the right.
ActiveCallBar::ActiveCallBar(QWidget *parent)
  : QWidget(parent)
{
        // Solid green background to make the active-call state unmistakable.
        setAutoFillBackground(true);
        auto p = palette();
        p.setColor(backgroundRole(), Qt::green);
        setPalette(p);

        // Size the bar relative to the default font so it scales with DPI/font settings.
        QFont f;
        const int fontHeight    = QFontMetrics(f).height();
        const int widgetMargin  = fontHeight / 3;
        const int contentHeight = fontHeight * 3;

        setFixedHeight(contentHeight + widgetMargin);

        topLayout_ = new QHBoxLayout(this);
        topLayout_->setSpacing(widgetMargin);
        topLayout_->setContentsMargins(
          2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
        topLayout_->setSizeConstraint(QLayout::SetMinimumSize);

        // Slightly larger, medium-weight label for the call party name.
        QFont labelFont;
        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.2);
        labelFont.setWeight(QFont::Medium);

        callPartyLabel_ = new QLabel(this);
        callPartyLabel_->setFont(labelFont);

        // TODO microphone mute/unmute icons
        muteBtn_ = new FlatButton(this);
        QIcon muteIcon;
        muteIcon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png");
        muteBtn_->setIcon(muteIcon);
        muteBtn_->setIconSize(QSize(buttonSize_ / 2, buttonSize_ / 2));
        muteBtn_->setToolTip(tr("Mute Mic"));
        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
        muteBtn_->setCornerRadius(buttonSize_ / 2);
        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
                // toggleMuteAudioSrc() flips the pipeline's mute state and writes the
                // new state into muted_; only update the UI if the toggle succeeded.
                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_)) {
                        QIcon icon;
                        if (muted_) {
                                // Fix: tooltips were plain literals — wrap in tr() so they
                                // are translatable, consistent with the initial tooltip.
                                muteBtn_->setToolTip(tr("Unmute Mic"));
                                icon.addFile(":/icons/icons/ui/round-remove-button.png");
                        } else {
                                muteBtn_->setToolTip(tr("Mute Mic"));
                                icon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png");
                        }
                        muteBtn_->setIcon(icon);
                }
        });

        topLayout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
        topLayout_->addWidget(muteBtn_, 0, Qt::AlignRight);
}
||||
|
||||
void |
||||
ActiveCallBar::setCallParty(const QString &userid, const QString &displayName) |
||||
{ |
||||
if (!displayName.isEmpty() && displayName != userid) |
||||
callPartyLabel_->setText("Active Call: " + displayName + " (" + userid + ")"); |
||||
else |
||||
callPartyLabel_->setText("Active Call: " + userid); |
||||
} |
@ -0,0 +1,26 @@ |
||||
#pragma once |
||||
|
||||
#include <QWidget> |
||||
|
||||
class QHBoxLayout; |
||||
class QLabel; |
||||
class QString; |
||||
class FlatButton; |
||||
|
||||
// Bar displayed while a voice call is active: shows the remote party and
// hosts the microphone mute/unmute button. Created and shown by the chat page.
class ActiveCallBar : public QWidget
{
        Q_OBJECT

public:
        ActiveCallBar(QWidget *parent = nullptr);

public slots:
        // Sets the label text; displayName may be empty, in which case the
        // bare user id is shown.
        void setCallParty(const QString &userid, const QString &displayName);

private:
        QHBoxLayout *topLayout_     = nullptr;
        QLabel *callPartyLabel_     = nullptr;
        FlatButton *muteBtn_        = nullptr;
        int buttonSize_             = 32;   // px; mute button is a circle of this diameter
        bool muted_                 = false; // mirrors the pipeline's mute state
};
@ -0,0 +1,315 @@ |
||||
#include <chrono> |
||||
|
||||
#include <QMediaPlaylist> |
||||
#include <QUrl> |
||||
|
||||
#include "CallManager.h" |
||||
#include "Cache.h" |
||||
#include "ChatPage.h" |
||||
#include "Logging.h" |
||||
#include "MainWindow.h" |
||||
#include "MatrixClient.h" |
||||
#include "UserSettingsPage.h" |
||||
#include "WebRTCSession.h" |
||||
|
||||
#include "dialogs/AcceptCall.h" |
||||
|
||||
Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>) |
||||
Q_DECLARE_METATYPE(mtx::responses::TurnServer) |
||||
|
||||
using namespace mtx::events; |
||||
using namespace mtx::events::msg; |
||||
|
||||
// TODO Allow altenative in settings
|
||||
#define STUN_SERVER "stun://turn.matrix.org:3478"
|
||||
|
||||
// Wires the WebRTC session's signals to outgoing Matrix VoIP events, starts the
// periodic TURN-server refresh, and hooks up ringtone playback.
CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
  : QObject(),
    session_(WebRTCSession::instance()),
    turnServerTimer_(this),
    settings_(userSettings)
{
        // Needed so these types can cross queued signal/slot connections.
        qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
        qRegisterMetaType<mtx::responses::TurnServer>();

        // Local SDP offer is ready: send m.call.invite followed by the
        // gathered local ICE candidates.
        connect(&session_, &WebRTCSession::offerCreated, this,
                [this](const std::string &sdp,
                       const std::vector<mtx::events::msg::CallCandidates::Candidate>& candidates)
                {
                        nhlog::ui()->debug("Offer created with callid_ and roomid_: {} {}", callid_, roomid_.toStdString());
                        emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
                        emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
                });

        // Local SDP answer is ready (we accepted an incoming offer): send
        // m.call.answer followed by our candidates.
        connect(&session_, &WebRTCSession::answerCreated, this,
                [this](const std::string &sdp,
                       const std::vector<mtx::events::msg::CallCandidates::Candidate>& candidates)
                {
                        nhlog::ui()->debug("Answer created with callid_ and roomid_: {} {}", callid_, roomid_.toStdString());
                        emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
                        emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
                });

        // Fetch TURN credentials shortly after startup; the interval is
        // re-tuned once a response (with a ttl) arrives.
        connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
        turnServerTimer_.start(2000);

        connect(this, &CallManager::turnServerRetrieved, this,
                [this](const mtx::responses::TurnServer &res)
                {
                        nhlog::net()->info("TURN server(s) retrieved from homeserver:");
                        nhlog::net()->info("username: {}", res.username);
                        nhlog::net()->info("ttl: {}", res.ttl);
                        for (const auto &u : res.uris)
                                nhlog::net()->info("uri: {}", u);

                        turnServer_ = res;
                        // Refresh at 90% of the credential ttl so they never expire mid-call.
                        turnServerTimer_.setInterval(res.ttl * 1000 * 0.9);
                });

        // Pipeline stopping == call ended: play the end-of-call sound once.
        connect(&session_, &WebRTCSession::pipelineChanged, this,
                [this](bool started) {
                        if (!started)
                                playRingtone("qrc:/media/media/callend.ogg", false);
                });

        // QMediaPlayer loads media asynchronously; start playback once loaded.
        connect(&player_, &QMediaPlayer::mediaStatusChanged, this,
                [this](QMediaPlayer::MediaStatus status) {
                        if (status == QMediaPlayer::LoadedMedia)
                                player_.play();
                });
}
||||
|
||||
// Start an outgoing call in the given room: initialise GStreamer, configure
// STUN/TURN, generate a call id and create the SDP offer. The m.call.invite
// itself is sent from the offerCreated handler once the offer is ready.
void
CallManager::sendInvite(const QString &roomid)
{
        // One call at a time.
        if (onActiveCall())
                return;

        // VoIP is only defined for 1:1 rooms (exactly two members).
        std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
        if (members.size() != 2) {
                emit ChatPage::instance()->showNotification("Voice/Video calls are limited to 1:1 rooms");
                return;
        }

        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                return;
        }

        roomid_ = roomid;
        setTurnServers();
        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");

        // TODO Add invite timeout
        generateCallID();
        // The callee is whichever of the two members is not us.
        const RoomMember &callee = members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(callee.user_id, callee.display_name);
        // Ringback loops until the call is answered or torn down.
        playRingtone("qrc:/media/media/ringback.ogg", true);
        if (!session_.createOffer()) {
                emit ChatPage::instance()->showNotification("Problem setting up call");
                endCall();
        }
}
||||
|
||||
void |
||||
CallManager::hangUp() |
||||
{ |
||||
nhlog::ui()->debug("CallManager::hangUp: roomid_: {}", roomid_.toStdString()); |
||||
if (!callid_.empty()) { |
||||
emit newMessage(roomid_, CallHangUp{callid_, 0, CallHangUp::Reason::User}); |
||||
endCall(); |
||||
} |
||||
} |
||||
|
||||
// True while a call's media pipeline is running.
bool
CallManager::onActiveCall()
{
        return session_.isActive();
}
||||
|
||||
// Entry point for timeline events from sync: route the four VoIP event types
// to their handlers; the first type that matches consumes the event.
void CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
        if (handleEvent_<CallInvite>(event))
                return;
        if (handleEvent_<CallCandidates>(event))
                return;
        if (handleEvent_<CallAnswer>(event))
                return;
        handleEvent_<CallHangUp>(event);
}
||||
|
||||
template<typename T> |
||||
bool |
||||
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event) |
||||
{ |
||||
if (std::holds_alternative<RoomEvent<T>>(event)) { |
||||
handleEvent(std::get<RoomEvent<T>>(event)); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
// Incoming m.call.invite: reject (hang up) if we're busy or the room is not
// 1:1, otherwise ring and show the accept/reject dialog.
void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
        nhlog::ui()->debug("CallManager::incoming CallInvite from {} with id {}", callInviteEvent.sender, callInviteEvent.content.call_id);

        if (callInviteEvent.content.call_id.empty())
                return;

        std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
        if (onActiveCall() || members.size() != 2) {
                // Busy / unsupported room: decline with a hangup.
                emit newMessage(QString::fromStdString(callInviteEvent.room_id),
                        CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut});
                return;
        }

        playRingtone("qrc:/media/media/ring.ogg", true);
        roomid_ = QString::fromStdString(callInviteEvent.room_id);
        callid_ = callInviteEvent.content.call_id;
        // Candidates can arrive before the user accepts; buffer them fresh.
        remoteICECandidates_.clear();

        const RoomMember &caller = members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(caller.user_id, caller.display_name);

        // Dialog is parented to MainWindow; the lambda copies the invite event
        // so the offer SDP survives until the user decides.
        auto dialog = new dialogs::AcceptCall(caller.user_id, caller.display_name, MainWindow::instance());
        connect(dialog, &dialogs::AcceptCall::accept, this,
                [this, callInviteEvent](){
                        MainWindow::instance()->hideOverlay();
                        answerInvite(callInviteEvent.content);});
        connect(dialog, &dialogs::AcceptCall::reject, this,
                [this](){
                        MainWindow::instance()->hideOverlay();
                        hangUp();});
        MainWindow::instance()->showSolidOverlayModal(dialog);
}
||||
|
||||
// User accepted an incoming call: initialise the pipeline, accept the remote
// offer and feed it any ICE candidates buffered while the dialog was open.
// On any failure we hang up so the caller isn't left ringing.
void
CallManager::answerInvite(const CallInvite &invite)
{
        stopRingtone();
        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                hangUp();
                return;
        }

        setTurnServers();
        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");

        if (!session_.acceptOffer(invite.sdp)) {
                emit ChatPage::instance()->showNotification("Problem setting up call");
                hangUp();
                return;
        }
        // Replay candidates that arrived before acceptance, then drop the buffer.
        session_.acceptICECandidates(remoteICECandidates_);
        remoteICECandidates_.clear();
}
||||
|
||||
// Incoming m.call.candidates: forward to the running session, or buffer them
// when the invite is still awaiting the user's accept/reject decision.
void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
        nhlog::ui()->debug("CallManager::incoming CallCandidates from {} with id {}", callCandidatesEvent.sender, callCandidatesEvent.content.call_id);
        if (callid_ == callCandidatesEvent.content.call_id) {
                if (onActiveCall())
                        session_.acceptICECandidates(callCandidatesEvent.content.candidates);
                else {
                        // CallInvite has been received and we're awaiting localUser to accept or reject the call
                        for (const auto &c : callCandidatesEvent.content.candidates)
                                remoteICECandidates_.push_back(c);
                }
        }
}
||||
|
||||
// Incoming m.call.answer for our outgoing invite: stop the ringback and apply
// the remote description; hang up if the SDP cannot be accepted.
void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
        nhlog::ui()->debug("CallManager::incoming CallAnswer from {} with id {}", callAnswerEvent.sender, callAnswerEvent.content.call_id);
        if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
                stopRingtone();
                if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
                        emit ChatPage::instance()->showNotification("Problem setting up call");
                        hangUp();
                }
        }
}
||||
|
||||
// Incoming m.call.hangup: tear down our side if it refers to the active call.
void
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
{
        nhlog::ui()->debug("CallManager::incoming CallHangUp from {} with id {}", callHangUpEvent.sender, callHangUpEvent.content.call_id);
        if (onActiveCall() && callid_ == callHangUpEvent.content.call_id)
                endCall();
}
||||
|
||||
void |
||||
CallManager::generateCallID() |
||||
{ |
||||
using namespace std::chrono; |
||||
uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count(); |
||||
callid_ = "c" + std::to_string(ms); |
||||
} |
||||
|
||||
void |
||||
CallManager::endCall() |
||||
{ |
||||
stopRingtone(); |
||||
session_.end(); |
||||
roomid_.clear(); |
||||
callid_.clear(); |
||||
remoteICECandidates_.clear(); |
||||
} |
||||
|
||||
// Ask the homeserver for TURN credentials. On failure, retry soon (5s); on
// success the turnServerRetrieved handler stores them and re-tunes the timer.
void
CallManager::retrieveTurnServer()
{
        http::client()->get_turn_server(
                [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
                        if (err) {
                                turnServerTimer_.setInterval(5000);
                                return;
                        }
                        // Emitted as a signal so the result is handled on the GUI thread.
                        emit turnServerRetrieved(res);
                });
}
||||
|
||||
// Convert the Matrix TURN response ("turn(s):host:port?...") into the
// credential-embedding, percent-encoded form gstreamer's webrtcbin expects,
// and hand the list to the session.
void
CallManager::setTurnServers()
{
        // gstreamer expects (percent-encoded): turn(s)://username:password@host:port?transport=udp(tcp)
        std::vector<std::string> uris;
        for (const auto &uri : turnServer_.uris) {
                if (auto c = uri.find(':'); c == std::string::npos) {
                        nhlog::ui()->error("Invalid TURN server uri: {}", uri);
                        continue;
                }
                else {
                        std::string scheme = std::string(uri, 0, c);
                        if (scheme != "turn" && scheme != "turns") {
                                nhlog::ui()->error("Invalid TURN server uri: {}", uri);
                                continue;
                        }
                        // Splice username:password@ between the scheme and the host part
                        // (++c skips the ':' after the scheme).
                        std::string res = scheme + "://" + turnServer_.username + ":" + turnServer_.password
                                + "@" + std::string(uri, ++c);
                        QString encodedUri = QUrl::toPercentEncoding(QString::fromStdString(res));
                        uris.push_back(encodedUri.toStdString());
                }
        }
        if (!uris.empty())
                session_.setTurnServers(uris);
}
||||
|
||||
// Queue a ringtone resource for playback, optionally looping. Actual playback
// starts from the mediaStatusChanged handler once the media has loaded.
// The playlist is function-local static because QMediaPlayer only borrows it,
// so it must outlive any call to play().
void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
        static QMediaPlaylist playlist;
        playlist.clear();
        playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop : QMediaPlaylist::CurrentItemOnce);
        playlist.addMedia(QUrl(ringtone));
        player_.setVolume(100);
        player_.setPlaylist(&playlist);
}
||||
|
||||
// Detaching the playlist stops playback immediately.
void
CallManager::stopRingtone()
{
        player_.setPlaylist(nullptr);
}
@ -0,0 +1,67 @@ |
||||
#pragma once |
||||
|
||||
#include <string> |
||||
#include <vector> |
||||
|
||||
#include <QObject> |
||||
#include <QMediaPlayer> |
||||
#include <QSharedPointer> |
||||
#include <QString> |
||||
#include <QTimer> |
||||
|
||||
#include "mtx/events/collections.hpp" |
||||
#include "mtx/events/voip.hpp" |
||||
#include "mtx/responses/turn_server.hpp" |
||||
|
||||
class UserSettings; |
||||
class WebRTCSession; |
||||
|
||||
// Orchestrates Matrix 1:1 voice calls: translates between timeline VoIP events
// (m.call.*) and the local WebRTCSession, manages TURN credential refresh,
// call-id bookkeeping and ringtone playback.
class CallManager : public QObject
{
        Q_OBJECT

public:
        CallManager(QSharedPointer<UserSettings>);

        // Start an outgoing call in roomid (must be a 1:1 room).
        void sendInvite(const QString &roomid);
        // End the current call and notify the peer.
        void hangUp();
        // True while a call's media pipeline is running.
        bool onActiveCall();

public slots:
        // Feed timeline events from sync; non-VoIP events are ignored.
        void syncEvent(const mtx::events::collections::TimelineEvents &event);

signals:
        // Request that a VoIP event be sent to the given room.
        void newMessage(const QString &roomid, const mtx::events::msg::CallInvite&);
        void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates&);
        void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer&);
        void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp&);
        void turnServerRetrieved(const mtx::responses::TurnServer&);
        // Announce who the remote party of the current call is (for the UI).
        void newCallParty(const QString &userid, const QString& displayName);

private slots:
        void retrieveTurnServer();

private:
        WebRTCSession& session_;
        QString roomid_;              // room of the current call
        std::string callid_;          // empty when no call is in progress
        const uint32_t timeoutms_ = 120000;  // invite lifetime advertised to the peer
        // Remote candidates buffered while an invite awaits user accept/reject.
        std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
        mtx::responses::TurnServer turnServer_;
        QTimer turnServerTimer_;      // periodic TURN credential refresh
        QSharedPointer<UserSettings> settings_;
        QMediaPlayer player_;         // ringtone/ringback playback

        template<typename T>
        bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite>&);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates>&);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer>&);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp>&);
        void answerInvite(const mtx::events::msg::CallInvite&);
        void generateCallID();
        void endCall();
        void setTurnServers();
        void playRingtone(const QString &ringtone, bool repeat);
        void stopRingtone();
};
@ -0,0 +1,438 @@ |
||||
#include "WebRTCSession.h" |
||||
#include "Logging.h" |
||||
|
||||
extern "C" { |
||||
#include "gst/gst.h" |
||||
#include "gst/sdp/sdp.h" |
||||
|
||||
#define GST_USE_UNSTABLE_API |
||||
#include "gst/webrtc/webrtc.h" |
||||
} |
||||
|
||||
namespace {
// File-local state shared between the GStreamer C callbacks below and the
// session. NOTE(review): plain globals — assumes a single concurrent call;
// WebRTCSession is a singleton so this holds today.
bool gisoffer;            // true when we are the offering side of the call
std::string glocalsdp;    // local description produced by webrtcbin
std::vector<mtx::events::msg::CallCandidates::Candidate> gcandidates;  // gathered local ICE candidates

// GStreamer callback forward declarations (definitions at the bottom of the file).
gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data);
GstWebRTCSessionDescription* parseSDP(const std::string &sdp, GstWebRTCSDPType type);
void generateOffer(GstElement *webrtc);
void setLocalDescription(GstPromise *promise, gpointer webrtc);
void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED);
gboolean onICEGatheringCompletion(gpointer timerid);
void createAnswer(GstPromise *promise, gpointer webrtc);
void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
void linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
}
||||
|
||||
// One-time GStreamer initialisation and plugin availability check.
// Returns true when GStreamer is usable; on failure writes a human-readable
// reason into *errorMessage (if provided). Safe to call repeatedly.
bool
WebRTCSession::init(std::string *errorMessage)
{
        if (initialised_)
                return true;

        GError *error = nullptr;
        if (!gst_init_check(nullptr, nullptr, &error)) {
                std::string strError = std::string("Failed to initialise GStreamer: ");
                if (error) {
                        strError += error->message;
                        g_error_free(error);
                }
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
                return false;
        }

        gchar *version = gst_version_string();
        std::string gstVersion(version);
        g_free(version);
        nhlog::ui()->info("Initialised " + gstVersion);

        // GStreamer Plugins:
        // Base: audioconvert, audioresample, opus, playback, videoconvert, volume
        // Good: autodetect, rtpmanager, vpx
        // Bad: dtls, srtp, webrtc
        // libnice [GLib]: nice
        initialised_ = true;
        std::string strError = gstVersion + ": Missing plugins: ";
        const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice",
                "opus", "playback", "rtpmanager", "srtp", "videoconvert", "vpx", "volume", "webrtc", nullptr};
        GstRegistry *registry = gst_registry_get();
        // Verify every plugin the call pipeline needs is present; collect the
        // names of missing ones so the user sees one actionable message.
        for (guint i = 0; i < g_strv_length((gchar**)needed); i++) {
                GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
                if (!plugin) {
                        strError += needed[i];
                        initialised_ = false;
                        continue;
                }
                gst_object_unref(plugin);
        }

        if (!initialised_) {
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
        }
        return initialised_;
}
||||
|
||||
// Begin an outgoing call: reset the shared callback state and start the
// pipeline; the actual offer is generated via on-negotiation-needed once the
// pipeline is PLAYING, and delivered through the offerCreated signal.
bool
WebRTCSession::createOffer()
{
        gisoffer = true;
        glocalsdp.clear();
        gcandidates.clear();
        return startPipeline(111); // a dynamic opus payload type
}
||||
|
||||
// Begin an incoming call: extract the opus payload type from the remote
// offer's SDP, start the pipeline with it, and apply the remote description.
// The answer is created asynchronously and delivered via answerCreated.
bool
WebRTCSession::acceptOffer(const std::string& sdp)
{
        nhlog::ui()->debug("Received offer:\n{}", sdp);
        gisoffer = false;
        glocalsdp.clear();
        gcandidates.clear();

        // eg a=rtpmap:111 opus/48000/2
        // Ad-hoc scan: find "opus", then walk back to the preceding ':' to read
        // the payload number between them.
        int opusPayloadType = 0;
        if (auto e = sdp.find("opus"); e == std::string::npos) {
                nhlog::ui()->error("WebRTC: remote offer - opus media attribute missing");
                return false;
        }
        else {
                if (auto s = sdp.rfind(':', e); s == std::string::npos) {
                        nhlog::ui()->error("WebRTC: remote offer - unable to determine opus payload type");
                        return false;
                }
                else {
                        ++s;
                        try {
                                opusPayloadType = std::stoi(std::string(sdp, s, e - s));
                        }
                        catch(...) {
                                nhlog::ui()->error("WebRTC: remote offer - unable to determine opus payload type");
                                return false;
                        }
                }
        }

        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
        if (!offer)
                return false;

        if (!startPipeline(opusPayloadType))
                return false;

        // set-remote-description first, then create-answer
        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
        g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
        gst_webrtc_session_description_free(offer);
        return true;
}
||||
|
||||
// Build and start the send pipeline, wire the webrtcbin signal handlers, and
// attach a bus watch for errors/EOS. Returns false if a call is already
// active or the pipeline cannot reach PLAYING.
bool
WebRTCSession::startPipeline(int opusPayloadType)
{
        if (isActive())
                return false;

        if (!createPipeline(opusPayloadType))
                return false;

        webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");

        if (!stunServer_.empty()) {
                nhlog::ui()->info("WebRTC: Setting stun server: {}", stunServer_);
                g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
        }
        addTurnServers();

        // generate the offer when the pipeline goes to PLAYING
        if (gisoffer)
                g_signal_connect(webrtc_, "on-negotiation-needed", G_CALLBACK(generateOffer), nullptr);

        // on-ice-candidate is emitted when a local ICE candidate has been gathered
        g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);

        // incoming streams trigger pad-added
        gst_element_set_state(pipe_, GST_STATE_READY);
        g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);

        // webrtcbin lifetime is the same as that of the pipeline
        // (drop the ref from gst_bin_get_by_name; the bin keeps it alive)
        gst_object_unref(webrtc_);

        // start the pipeline
        GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
                nhlog::ui()->error("WebRTC: unable to start pipeline");
                gst_object_unref(pipe_);
                pipe_ = nullptr;
                webrtc_ = nullptr;
                return false;
        }

        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
        gst_bus_add_watch(bus, newBusMessage, this);
        gst_object_unref(bus);
        emit pipelineChanged(true);
        return true;
}
||||
|
||||
#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload=" |
||||
|
||||
// Parse the audio-only send pipeline: default audio source -> mutable volume
// ("srclevel", used by toggleMuteAudioSrc) -> opus RTP -> webrtcbin.
// The caps carry the negotiated opus payload type.
bool
WebRTCSession::createPipeline(int opusPayloadType)
{
        std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin "
                "autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! "
                "queue ! " RTP_CAPS_OPUS + std::to_string(opusPayloadType) + " ! webrtcbin.");

        webrtc_ = nullptr;
        GError *error = nullptr;
        pipe_ = gst_parse_launch(pipeline.c_str(), &error);
        if (error) {
                nhlog::ui()->error("WebRTC: Failed to parse pipeline: {}", error->message);
                g_error_free(error);
                // gst_parse_launch can return a partially-built pipeline with the error set.
                if (pipe_) {
                        gst_object_unref(pipe_);
                        pipe_ = nullptr;
                }
                return false;
        }
        return true;
}
||||
|
||||
// Apply the remote answer to our outstanding offer. Returns false when no
// call is active or the SDP does not parse.
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: Received sdp:\n{}", sdp);
        if (!isActive())
                return false;

        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
        if (!answer)
                return false;

        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
        gst_webrtc_session_description_free(answer);
        return true;
}
||||
|
||||
void |
||||
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate>& candidates) |
||||
{ |
||||
if (isActive()) { |
||||
for (const auto& c : candidates) |
||||
g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str()); |
||||
} |
||||
} |
||||
|
||||
// Flip the mute state of the outgoing audio (the "srclevel" volume element).
// On success writes the NEW state into isMuted and returns true; returns
// false when no call is active or the element cannot be found.
bool
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
{
        if (!isActive())
                return false;

        GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
        if (!srclevel)
                return false;

        gboolean muted;
        g_object_get(srclevel, "mute", &muted, nullptr);
        g_object_set(srclevel, "mute", !muted, nullptr);
        gst_object_unref(srclevel);
        isMuted = !muted;
        return true;
}
||||
|
||||
// Stop and release the pipeline. Emits pipelineChanged(false) so listeners
// (call-end sound, UI) react; safe to call when no call is active.
void
WebRTCSession::end()
{
        if (pipe_) {
                gst_element_set_state(pipe_, GST_STATE_NULL);
                gst_object_unref(pipe_);
                pipe_ = nullptr;
        }
        // webrtc_ was a borrowed pointer into the pipeline; just forget it.
        webrtc_ = nullptr;
        emit pipelineChanged(false);
}
||||
|
||||
// Register each configured TURN uri with webrtcbin via its add-turn-server
// action signal; failures are logged but do not abort the call (STUN or
// direct connectivity may still succeed).
void
WebRTCSession::addTurnServers()
{
        if (!webrtc_)
                return;

        for (const auto &uri : turnServers_) {
                gboolean res;
                g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&res));
                if (res)
                        nhlog::ui()->info("WebRTC: Set TURN server: {}", uri);
                else
                        nhlog::ui()->error("WebRTC: Failed to set TURN server: {}", uri);
        }
}
||||
|
||||
namespace { |
||||
|
||||
// Pipeline bus watch: tear the session down on end-of-stream or on any
// element error. Returning TRUE keeps the watch installed.
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
        WebRTCSession *session = (WebRTCSession*)user_data;
        switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
                session->end();
                break;
        case GST_MESSAGE_ERROR:
                GError *error;
                gchar *debug;
                gst_message_parse_error(msg, &error, &debug);
                nhlog::ui()->error("WebRTC: Error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
                g_clear_error(&error);
                g_free(debug);
                session->end();
                break;
        default:
                break;
        }
        return TRUE;
}
||||
|
||||
// Parse an SDP string into a session description of the given type.
// On success, ownership of the GstSDPMessage transfers to the returned
// description; on failure returns nullptr.
GstWebRTCSessionDescription*
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8*)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                return gst_webrtc_session_description_new(type, msg);
        }
        else {
                nhlog::ui()->error("WebRTC: Failed to parse remote session description");
                // Fix: GstSDPMessage is a plain boxed struct, not a GstObject —
                // gst_object_unref() here is invalid (GLib CRITICAL at runtime,
                // memory never released). Free it with the SDP API instead.
                gst_sdp_message_free(msg);
                return nullptr;
        }
}
||||
|
||||
// on-negotiation-needed handler for the offering side: kick off offer
// creation; the promise callback applies it as the local description.
void
generateOffer(GstElement *webrtc)
{
        // create-offer first, then set-local-description
        GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}
||||
|
||||
// Promise callback for both create-offer and create-answer: applies the
// produced description as the local description and stashes its SDP text in
// glocalsdp for emission once ICE gathering completes.
void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
        const GstStructure *reply = gst_promise_get_reply(promise);
        // The reply field is named after the operation that produced it.
        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
        GstWebRTCSessionDescription *gstsdp = nullptr;
        gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr);
        gst_promise_unref(promise);
        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);

        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
        glocalsdp = std::string(sdp);
        g_free(sdp);
        gst_webrtc_session_description_free(gstsdp);

        nhlog::ui()->debug("WebRTC: Local description set ({}):\n{}", isAnswer ? "answer" : "offer", glocalsdp);
}
||||
|
||||
// on-ice-candidate handler: collect each local candidate, and (re)arm a
// debounce timer so gathering is considered complete 100ms after the last one.
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED)
{
        gcandidates.push_back({"audio", (uint16_t)mlineIndex, candidate});

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early
        // fixed in v1.18
        // use a 100ms timeout in the meantime
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);

        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
}
||||
|
||||
// Debounce timer fired: gathering is done, emit the completed local SDP plus
// candidates as either an offer or an answer. Returning FALSE removes the
// timeout source; the timer id is zeroed so it isn't removed twice.
gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *(guint*)(timerid) = 0;
        if (gisoffer)
                emit WebRTCSession::instance().offerCreated(glocalsdp, gcandidates);
        else
                emit WebRTCSession::instance().answerCreated(glocalsdp, gcandidates);

        return FALSE;
}
||||
|
||||
// Promise callback run after set-remote-description on the answering side:
// chain into create-answer with a fresh promise.
void
createAnswer(GstPromise *promise, gpointer webrtc)
{
        // create-answer first, then set-local-description
        gst_promise_unref(promise);
        promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}
||||
|
||||
// pad-added handler on webrtcbin: for each incoming stream, hang a decodebin
// off the new src pad; the decodebin's own pad-added then routes decoded
// media to a sink in linkNewPad.
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: Unable to link new pad");
        gst_object_unref(sinkpad);
}
||||
|
||||
void |
||||
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) |
||||
{ |
||||
GstCaps *caps = gst_pad_get_current_caps(newpad); |
||||
if (!caps) |
||||
return; |
||||
|
||||
const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); |
||||
gst_caps_unref(caps); |
||||
|
||||
GstPad *queuepad = nullptr; |
||||
GstElement *queue = gst_element_factory_make("queue", nullptr); |
||||
|
||||
if (g_str_has_prefix(name, "audio")) { |
||||
GstElement *convert = gst_element_factory_make("audioconvert", nullptr); |
||||
GstElement *resample = gst_element_factory_make("audioresample", nullptr); |
||||
GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); |
||||
gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); |
||||
gst_element_sync_state_with_parent(queue); |
||||
gst_element_sync_state_with_parent(convert); |
||||
gst_element_sync_state_with_parent(resample); |
||||
gst_element_sync_state_with_parent(sink); |
||||
gst_element_link_many(queue, convert, resample, sink, nullptr); |
||||
queuepad = gst_element_get_static_pad(queue, "sink"); |
||||
} |
||||
else if (g_str_has_prefix(name, "video")) { |
||||
GstElement *convert = gst_element_factory_make("videoconvert", nullptr); |
||||
GstElement *sink = gst_element_factory_make("autovideosink", nullptr); |
||||
gst_bin_add_many(GST_BIN(pipe), queue, convert, sink, nullptr); |
||||
gst_element_sync_state_with_parent(queue); |
||||
gst_element_sync_state_with_parent(convert); |
||||
gst_element_sync_state_with_parent(sink); |
||||
gst_element_link_many(queue, convert, sink, nullptr); |
||||
queuepad = gst_element_get_static_pad(queue, "sink"); |
||||
} |
||||
|
||||
if (queuepad) { |
||||
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) |
||||
nhlog::ui()->error("WebRTC: Unable to link new pad"); |
||||
gst_object_unref(queuepad); |
||||
} |
||||
} |
||||
|
||||
} |
@ -0,0 +1,58 @@ |
||||
#pragma once |
||||
|
||||
#include <string> |
||||
#include <vector> |
||||
|
||||
#include <QObject> |
||||
|
||||
#include "mtx/events/voip.hpp" |
||||
|
||||
typedef struct _GstElement GstElement; |
||||
|
||||
// Singleton owning the GStreamer pipeline for a WebRTC voice call:
// SDP offer/answer handling and ICE candidate exchange for Matrix VoIP.
class WebRTCSession : public QObject
{
        Q_OBJECT

public:
        // Meyers singleton — the app drives at most one call session at a time.
        static WebRTCSession& instance()
        {
                static WebRTCSession instance;
                return instance;
        }

        // One-time GStreamer initialisation. Returns false on failure; if
        // errorMessage is non-null, presumably a description is stored there
        // — confirm against the implementation.
        bool init(std::string *errorMessage = nullptr);

        // Caller side: start a call by creating a local SDP offer.
        bool createOffer();
        // Callee side: accept a remote offer / the caller's returned answer.
        bool acceptOffer(const std::string &sdp);
        bool acceptAnswer(const std::string &sdp);
        // Feed remote ICE candidates into the session.
        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate>&);

        // A call is considered active while the pipeline object exists.
        bool isActive() { return pipe_ != nullptr; }
        // Toggles microphone mute; isMuted is passed by reference and
        // presumably updated to the new state — verify in the .cpp.
        bool toggleMuteAudioSrc(bool &isMuted);
        // Tear down the call and its pipeline.
        void end();

        void setStunServer(const std::string &stunServer) {stunServer_ = stunServer;}
        void setTurnServers(const std::vector<std::string> &uris) {turnServers_ = uris;}

signals:
        // Emitted when the local SDP and gathered candidates are ready to send.
        void offerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
        void answerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
        // Emitted when the pipeline starts (true) or stops (false).
        void pipelineChanged(bool started);

private:
        WebRTCSession() : QObject() {}

        bool initialised_ = false;      // init() completed successfully
        GstElement *pipe_ = nullptr;    // top-level pipeline; null when no call
        GstElement *webrtc_ = nullptr;  // the webrtcbin element
        std::string stunServer_;
        std::vector<std::string> turnServers_;

        bool startPipeline(int opusPayloadType);
        bool createPipeline(int opusPayloadType);
        void addTurnServers();

public:
        // Non-copyable: enforce singleton semantics.
        WebRTCSession(WebRTCSession const&) = delete;
        void operator=(WebRTCSession const&) = delete;
};
@ -0,0 +1,53 @@ |
||||
#include <QLabel> |
||||
#include <QPushButton> |
||||
#include <QVBoxLayout> |
||||
|
||||
#include "Config.h" |
||||
#include "dialogs/AcceptCall.h" |
||||
|
||||
namespace dialogs { |
||||
|
||||
AcceptCall::AcceptCall(const QString &caller, const QString &displayName, QWidget *parent) |
||||
: QWidget(parent) |
||||
{ |
||||
setAutoFillBackground(true); |
||||
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); |
||||
setWindowModality(Qt::WindowModal); |
||||
setAttribute(Qt::WA_DeleteOnClose, true); |
||||
|
||||
auto layout = new QVBoxLayout(this); |
||||
layout->setSpacing(conf::modals::WIDGET_SPACING); |
||||
layout->setMargin(conf::modals::WIDGET_MARGIN); |
||||
|
||||
auto buttonLayout = new QHBoxLayout(); |
||||
buttonLayout->setSpacing(15); |
||||
buttonLayout->setMargin(0); |
||||
|
||||
acceptBtn_ = new QPushButton(tr("Accept"), this); |
||||
acceptBtn_->setDefault(true); |
||||
rejectBtn_ = new QPushButton(tr("Reject"), this); |
||||
|
||||
buttonLayout->addStretch(1); |
||||
buttonLayout->addWidget(acceptBtn_); |
||||
buttonLayout->addWidget(rejectBtn_); |
||||
|
||||
QLabel *label; |
||||
if (!displayName.isEmpty() && displayName != caller) |
||||
label = new QLabel("Accept call from " + displayName + " (" + caller + ")?", this); |
||||
else |
||||
label = new QLabel("Accept call from " + caller + "?", this); |
||||
|
||||
layout->addWidget(label); |
||||
layout->addLayout(buttonLayout); |
||||
|
||||
connect(acceptBtn_, &QPushButton::clicked, this, [this]() { |
||||
emit accept(); |
||||
emit close(); |
||||
}); |
||||
connect(rejectBtn_, &QPushButton::clicked, this, [this]() { |
||||
emit reject(); |
||||
emit close(); |
||||
}); |
||||
} |
||||
|
||||
} |
@ -0,0 +1,26 @@ |
||||
#pragma once |
||||
|
||||
#include <QString> |
||||
#include <QWidget> |
||||
|
||||
class QPushButton; |
||||
|
||||
namespace dialogs {

// Popup dialog shown when a call invite arrives, offering Accept/Reject.
// Self-deleting on close (set up in the constructor via Qt::WA_DeleteOnClose).
class AcceptCall : public QWidget
{
        Q_OBJECT

public:
        // caller: the remote party's id; displayName: their display name
        // (may be empty or identical to caller, in which case only the
        // caller id is shown).
        AcceptCall(const QString &caller, const QString &displayName, QWidget *parent = nullptr);

signals:
        // Emitted when the user presses the corresponding button; the dialog
        // closes itself afterwards.
        void accept();
        void reject();

private:
        QPushButton *acceptBtn_;
        QPushButton *rejectBtn_;
};

}
@ -0,0 +1,60 @@ |
||||
#include <QLabel> |
||||
#include <QPushButton> |
||||
#include <QString> |
||||
#include <QVBoxLayout> |
||||
|
||||
#include "Config.h" |
||||
#include "dialogs/PlaceCall.h" |
||||
|
||||
namespace dialogs { |
||||
|
||||
PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget *parent) |
||||
: QWidget(parent) |
||||
{ |
||||
setAutoFillBackground(true); |
||||
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); |
||||
setWindowModality(Qt::WindowModal); |
||||
setAttribute(Qt::WA_DeleteOnClose, true); |
||||
|
||||
auto layout = new QVBoxLayout(this); |
||||
layout->setSpacing(conf::modals::WIDGET_SPACING); |
||||
layout->setMargin(conf::modals::WIDGET_MARGIN); |
||||
|
||||
auto buttonLayout = new QHBoxLayout(); |
||||
buttonLayout->setSpacing(15); |
||||
buttonLayout->setMargin(0); |
||||
|
||||
voiceBtn_ = new QPushButton(tr("Voice Call"), this); |
||||
voiceBtn_->setDefault(true); |
||||
videoBtn_ = new QPushButton(tr("Video Call"), this); |
||||
cancelBtn_ = new QPushButton(tr("Cancel"), this); |
||||
|
||||
buttonLayout->addStretch(1); |
||||
buttonLayout->addWidget(voiceBtn_); |
||||
buttonLayout->addWidget(videoBtn_); |
||||
buttonLayout->addWidget(cancelBtn_); |
||||
|
||||
QLabel *label; |
||||
if (!displayName.isEmpty() && displayName != callee) |
||||
label = new QLabel("Place a call to " + displayName + " (" + callee + ")?", this); |
||||
else |
||||
label = new QLabel("Place a call to " + callee + "?", this); |
||||
|
||||
layout->addWidget(label); |
||||
layout->addLayout(buttonLayout); |
||||
|
||||
connect(voiceBtn_, &QPushButton::clicked, this, [this]() { |
||||
emit voice(); |
||||
emit close(); |
||||
}); |
||||
connect(videoBtn_, &QPushButton::clicked, this, [this]() { |
||||
emit video(); |
||||
emit close(); |
||||
}); |
||||
connect(cancelBtn_, &QPushButton::clicked, this, [this]() { |
||||
emit cancel(); |
||||
emit close(); |
||||
}); |
||||
} |
||||
|
||||
} |
@ -0,0 +1,28 @@ |
||||
#pragma once |
||||
|
||||
#include <QWidget> |
||||
|
||||
class QPushButton; |
||||
class QString; |
||||
|
||||
namespace dialogs {

// Popup dialog shown before starting an outgoing call, offering
// Voice Call / Video Call / Cancel. Self-deleting on close (set up in the
// constructor via Qt::WA_DeleteOnClose).
class PlaceCall : public QWidget
{
        Q_OBJECT

public:
        // callee: the remote party's id; displayName: their display name
        // (may be empty or identical to callee, in which case only the
        // callee id is shown).
        PlaceCall(const QString &callee, const QString &displayName, QWidget *parent = nullptr);

signals:
        // Emitted when the user presses the corresponding button; the dialog
        // closes itself afterwards.
        void voice();
        void video();
        void cancel();

private:
        QPushButton *voiceBtn_;
        QPushButton *videoBtn_;
        QPushButton *cancelBtn_;
};

}
Loading…
Reference in new issue