forked from mirror/nheko
commit
3fece53eb7
After Width: | Height: | Size: 643 B |
After Width: | Height: | Size: 1.1 KiB |
After Width: | Height: | Size: 1.1 KiB |
After Width: | Height: | Size: 759 B |
@ -0,0 +1,5 @@ |
|||||||
|
The media files below were obtained from https://github.com/matrix-org/matrix-react-sdk/tree/develop/res/media |
||||||
|
|
||||||
|
callend.ogg |
||||||
|
ringback.ogg |
||||||
|
ring.ogg |
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,160 @@ |
|||||||
|
#include <cstdio> |
||||||
|
|
||||||
|
#include <QDateTime> |
||||||
|
#include <QHBoxLayout> |
||||||
|
#include <QIcon> |
||||||
|
#include <QLabel> |
||||||
|
#include <QString> |
||||||
|
#include <QTimer> |
||||||
|
|
||||||
|
#include "ActiveCallBar.h" |
||||||
|
#include "ChatPage.h" |
||||||
|
#include "Utils.h" |
||||||
|
#include "WebRTCSession.h" |
||||||
|
#include "ui/Avatar.h" |
||||||
|
#include "ui/FlatButton.h" |
||||||
|
|
||||||
|
// Constructs the green bar shown at the top of the chat while a voice call
// is being set up or is in progress: avatar + caller name + state text +
// elapsed-time label + mute toggle button.
ActiveCallBar::ActiveCallBar(QWidget *parent)
  : QWidget(parent)
{
        // Solid green background so the bar stands out from the chat view.
        setAutoFillBackground(true);
        auto p = palette();
        p.setColor(backgroundRole(), QColor(46, 204, 113));
        setPalette(p);

        // Default application font; used only to derive pixel metrics.
        // (The original called f.setPointSizeF(f.pointSizeF()) here, a no-op
        // self-assignment — removed.)
        QFont f;

        const int fontHeight    = QFontMetrics(f).height();
        const int widgetMargin  = fontHeight / 3;
        const int contentHeight = fontHeight * 3;

        setFixedHeight(contentHeight + widgetMargin);

        layout_ = new QHBoxLayout(this);
        layout_->setSpacing(widgetMargin);
        layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);

        // Slightly enlarged, medium-weight font for all text labels.
        QFont labelFont;
        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
        labelFont.setWeight(QFont::Medium);

        avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5);

        callPartyLabel_ = new QLabel(this);
        callPartyLabel_->setFont(labelFont);

        stateLabel_ = new QLabel(this);
        stateLabel_->setFont(labelFont);

        // Call duration is hidden until the call actually connects.
        durationLabel_ = new QLabel(this);
        durationLabel_->setFont(labelFont);
        durationLabel_->hide();

        muteBtn_ = new FlatButton(this);
        setMuteIcon(false);
        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
        muteBtn_->setCornerRadius(buttonSize_ / 2);
        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
                // toggleMuteAudioSrc updates muted_ (passed by reference) and
                // reports success; only then is the icon flipped.
                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
                        setMuteIcon(muted_);
        });

        layout_->addWidget(avatar_, 0, Qt::AlignLeft);
        layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
        layout_->addStretch();
        layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
        layout_->addSpacing(18);

        // One-second tick that re-renders the elapsed call time as
        // HH:MM:SS (hours omitted while zero).
        timer_ = new QTimer(this);
        connect(timer_, &QTimer::timeout, this, [this]() {
                auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
                int s = seconds % 60;
                int m = (seconds / 60) % 60;
                int h = seconds / 3600;
                char buf[12];
                if (h)
                        snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
                else
                        snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
                durationLabel_->setText(buf);
        });

        connect(
          &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}
||||||
|
|
||||||
|
void |
||||||
|
ActiveCallBar::setMuteIcon(bool muted) |
||||||
|
{ |
||||||
|
QIcon icon; |
||||||
|
if (muted) { |
||||||
|
muteBtn_->setToolTip("Unmute Mic"); |
||||||
|
icon.addFile(":/icons/icons/ui/microphone-unmute.png"); |
||||||
|
} else { |
||||||
|
muteBtn_->setToolTip("Mute Mic"); |
||||||
|
icon.addFile(":/icons/icons/ui/microphone-mute.png"); |
||||||
|
} |
||||||
|
muteBtn_->setIcon(icon); |
||||||
|
muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_)); |
||||||
|
} |
||||||
|
|
||||||
|
void |
||||||
|
ActiveCallBar::setCallParty(const QString &userid, |
||||||
|
const QString &displayName, |
||||||
|
const QString &roomName, |
||||||
|
const QString &avatarUrl) |
||||||
|
{ |
||||||
|
callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " "); |
||||||
|
|
||||||
|
if (!avatarUrl.isEmpty()) |
||||||
|
avatar_->setImage(avatarUrl); |
||||||
|
else |
||||||
|
avatar_->setLetter(utils::firstChar(roomName)); |
||||||
|
} |
||||||
|
|
||||||
|
// Slot connected to WebRTCSession::stateChanged: drives the bar's
// visibility, status text, and the call-duration timer from the session's
// state machine.
void
ActiveCallBar::update(WebRTCSession::State state)
{
        switch (state) {
        case WebRTCSession::State::INITIATING:
                show();
                stateLabel_->setText("Initiating call...");
                break;
        case WebRTCSession::State::INITIATED:
                show();
                stateLabel_->setText("Call initiated...");
                break;
        case WebRTCSession::State::OFFERSENT:
                show();
                stateLabel_->setText("Calling...");
                break;
        case WebRTCSession::State::CONNECTING:
                show();
                stateLabel_->setText("Connecting...");
                break;
        case WebRTCSession::State::CONNECTED:
                // Call is live: start the elapsed-time clock and swap the
                // textual state for the in-call icon.
                show();
                callStartTime_ = QDateTime::currentSecsSinceEpoch();
                timer_->start(1000);
                stateLabel_->setPixmap(
                  QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
                durationLabel_->setText("00:00");
                durationLabel_->show();
                break;
        case WebRTCSession::State::ICEFAILED:
        case WebRTCSession::State::DISCONNECTED:
                // Call over (normally or via ICE failure): hide the bar and
                // reset every label/icon back to the idle state.
                hide();
                timer_->stop();
                callPartyLabel_->setText(QString());
                stateLabel_->setText(QString());
                durationLabel_->setText(QString());
                durationLabel_->hide();
                setMuteIcon(false);
                break;
        default:
                break;
        }
}
@ -0,0 +1,40 @@ |
|||||||
|
#pragma once |
||||||
|
|
||||||
|
#include <QWidget> |
||||||
|
|
||||||
|
#include "WebRTCSession.h" |
||||||
|
|
||||||
|
class QHBoxLayout; |
||||||
|
class QLabel; |
||||||
|
class QTimer; |
||||||
|
class Avatar; |
||||||
|
class FlatButton; |
||||||
|
|
||||||
|
// Bar displayed above the timeline during a voice call: shows the remote
// party (avatar + name), the current call state, elapsed time, and a
// mute/unmute button. Driven by WebRTCSession::stateChanged.
class ActiveCallBar : public QWidget
{
        Q_OBJECT

public:
        ActiveCallBar(QWidget *parent = nullptr);

public slots:
        // React to WebRTC session state transitions (show/hide, labels, timer).
        void update(WebRTCSession::State);
        // Populate the bar with the remote party's identity and avatar.
        void setCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private:
        QHBoxLayout *layout_      = nullptr;
        Avatar *avatar_           = nullptr;
        QLabel *callPartyLabel_   = nullptr;  // remote user's name / id
        QLabel *stateLabel_       = nullptr;  // textual state or in-call icon
        QLabel *durationLabel_    = nullptr;  // elapsed time, visible once connected
        FlatButton *muteBtn_      = nullptr;
        int buttonSize_           = 22;       // px; also used as icon size
        bool muted_               = false;    // updated by WebRTCSession::toggleMuteAudioSrc
        qint64 callStartTime_     = 0;        // epoch seconds when state hit CONNECTED
        QTimer *timer_            = nullptr;  // 1 s tick for durationLabel_

        // Sync the mute button's icon/tooltip with the given mute state.
        void setMuteIcon(bool muted);
};
@ -0,0 +1,458 @@ |
|||||||
|
#include <algorithm> |
||||||
|
#include <cctype> |
||||||
|
#include <chrono> |
||||||
|
#include <cstdint> |
||||||
|
|
||||||
|
#include <QMediaPlaylist> |
||||||
|
#include <QUrl> |
||||||
|
|
||||||
|
#include "Cache.h" |
||||||
|
#include "CallManager.h" |
||||||
|
#include "ChatPage.h" |
||||||
|
#include "Logging.h" |
||||||
|
#include "MainWindow.h" |
||||||
|
#include "MatrixClient.h" |
||||||
|
#include "UserSettingsPage.h" |
||||||
|
#include "WebRTCSession.h" |
||||||
|
#include "dialogs/AcceptCall.h" |
||||||
|
|
||||||
|
#include "mtx/responses/turn_server.hpp" |
||||||
|
|
||||||
|
Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>) |
||||||
|
Q_DECLARE_METATYPE(mtx::events::msg::CallCandidates::Candidate) |
||||||
|
Q_DECLARE_METATYPE(mtx::responses::TurnServer) |
||||||
|
|
||||||
|
using namespace mtx::events; |
||||||
|
using namespace mtx::events::msg; |
||||||
|
|
||||||
|
// https://github.com/vector-im/riot-web/issues/10173
|
||||||
|
#define STUN_SERVER "stun://turn.matrix.org:3478"
|
||||||
|
|
||||||
|
namespace { |
||||||
|
std::vector<std::string> |
||||||
|
getTurnURIs(const mtx::responses::TurnServer &turnServer); |
||||||
|
} |
||||||
|
|
||||||
|
// Wires the call manager to the (singleton) WebRTC session: outgoing
// offer/answer/ICE messages, TURN credential refresh, session state
// bookkeeping, and ringtone playback.
CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
  : QObject()
  , session_(WebRTCSession::instance())
  , turnServerTimer_(this)
  , settings_(userSettings)
{
        // Needed so these types can cross queued signal/slot connections.
        qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
        qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
        qRegisterMetaType<mtx::responses::TurnServer>();

        // Local offer ready: send m.call.invite + candidates, then arm a
        // timeout that hangs up if the callee never answers.
        connect(
          &session_,
          &WebRTCSession::offerCreated,
          this,
          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
                  nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
                  emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
                  QTimer::singleShot(timeoutms_, this, [this]() {
                          // Still OFFERSENT after the timeout means nobody
                          // picked up.
                          if (session_.state() == WebRTCSession::State::OFFERSENT) {
                                  hangUp(CallHangUp::Reason::InviteTimeOut);
                                  emit ChatPage::instance()->showNotification(
                                    "The remote side failed to pick up.");
                          }
                  });
          });

        // Local answer ready: send m.call.answer + our candidates.
        connect(
          &session_,
          &WebRTCSession::answerCreated,
          this,
          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
                  nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
                  emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
          });

        // Trickle ICE: forward each new local candidate as it appears.
        connect(&session_,
                &WebRTCSession::newICECandidate,
                this,
                [this](const CallCandidates::Candidate &candidate) {
                        nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
                        emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
                });

        connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);

        // Fresh TURN credentials retrieved: cache the URIs and reschedule the
        // next refresh before the credentials expire.
        connect(this,
                &CallManager::turnServerRetrieved,
                this,
                [this](const mtx::responses::TurnServer &res) {
                        nhlog::net()->info("TURN server(s) retrieved from homeserver:");
                        nhlog::net()->info("username: {}", res.username);
                        nhlog::net()->info("ttl: {} seconds", res.ttl);
                        for (const auto &u : res.uris)
                                nhlog::net()->info("uri: {}", u);

                        // Request new credentials close to expiry
                        // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
                        turnURIs_ = getTurnURIs(res);
                        // Clamp to at least one hour between refreshes.
                        uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
                        if (res.ttl < 3600)
                                nhlog::net()->warn("Setting ttl to 1 hour");
                        // Refresh at 90% of the ttl.
                        // NOTE(review): ttl * 1000 is evaluated in uint32_t and
                        // would wrap for ttl > ~49 days — confirm acceptable.
                        turnServerTimer_.setInterval(ttl * 1000 * 0.9);
                });

        // Session teardown / failure handling.
        connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
                switch (state) {
                case WebRTCSession::State::DISCONNECTED:
                        playRingtone("qrc:/media/media/callend.ogg", false);
                        clear();
                        break;
                case WebRTCSession::State::ICEFAILED: {
                        QString error("Call connection failed.");
                        if (turnURIs_.empty())
                                error += " Your homeserver has no configured TURN server.";
                        emit ChatPage::instance()->showNotification(error);
                        hangUp(CallHangUp::Reason::ICEFailed);
                        break;
                }
                default:
                        break;
                }
        });

        // Ringtones are loaded asynchronously; start playback once loaded.
        connect(&player_,
                &QMediaPlayer::mediaStatusChanged,
                this,
                [this](QMediaPlayer::MediaStatus status) {
                        if (status == QMediaPlayer::LoadedMedia)
                                player_.play();
                });
}
||||||
|
|
||||||
|
// Initiates an outgoing voice call in the given room. No-op when a call is
// already active; voice calls are restricted to 1:1 rooms.
void
CallManager::sendInvite(const QString &roomid)
{
        if (onActiveCall())
                return;

        auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
        if (roomInfo.member_count != 2) {
                emit ChatPage::instance()->showNotification(
                  "Voice calls are limited to 1:1 rooms.");
                return;
        }

        // Lazily initialise GStreamer; surface any failure to the user.
        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                return;
        }

        roomid_ = roomid;
        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
        session_.setTurnServers(turnURIs_);

        generateCallID();
        nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
        // In a 1:1 room the callee is whichever of the two members is not us.
        std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
        const RoomMember &callee =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(callee.user_id,
                          callee.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));
        // Ring-back tone loops until the callee answers or the call times out.
        playRingtone("qrc:/media/media/ringback.ogg", true);
        if (!session_.createOffer()) {
                emit ChatPage::instance()->showNotification("Problem setting up call.");
                endCall();
        }
}
||||||
|
|
||||||
|
namespace { |
||||||
|
std::string |
||||||
|
callHangUpReasonString(CallHangUp::Reason reason) |
||||||
|
{ |
||||||
|
switch (reason) { |
||||||
|
case CallHangUp::Reason::ICEFailed: |
||||||
|
return "ICE failed"; |
||||||
|
case CallHangUp::Reason::InviteTimeOut: |
||||||
|
return "Invite time out"; |
||||||
|
default: |
||||||
|
return "User"; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
void |
||||||
|
CallManager::hangUp(CallHangUp::Reason reason) |
||||||
|
{ |
||||||
|
if (!callid_.empty()) { |
||||||
|
nhlog::ui()->debug( |
||||||
|
"WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason)); |
||||||
|
emit newMessage(roomid_, CallHangUp{callid_, 0, reason}); |
||||||
|
endCall(); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
bool |
||||||
|
CallManager::onActiveCall() |
||||||
|
{ |
||||||
|
return session_.state() != WebRTCSession::State::DISCONNECTED; |
||||||
|
} |
||||||
|
|
||||||
|
// Entry point for timeline events arriving from sync: dispatches any VoIP
// event (invite/candidates/answer/hangup) to its typed handler. Compiled to
// a no-op when GStreamer support is absent.
void
CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
#ifdef GSTREAMER_AVAILABLE
        // Short-circuits on the first handler that matches the variant.
        if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
            handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
                return;
#else
        (void)event;
#endif
}
||||||
|
|
||||||
|
template<typename T> |
||||||
|
bool |
||||||
|
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event) |
||||||
|
{ |
||||||
|
if (std::holds_alternative<RoomEvent<T>>(event)) { |
||||||
|
handleEvent(std::get<RoomEvent<T>>(event)); |
||||||
|
return true; |
||||||
|
} |
||||||
|
return false; |
||||||
|
} |
||||||
|
|
||||||
|
// Handles an incoming m.call.invite. Video calls, non-1:1 rooms, and
// invites received while already on a call are rejected by replying with a
// hangup; otherwise the ringtone starts and an accept/reject dialog is shown.
void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
        // Detect a video call by case-insensitively searching the SDP for an
        // "m=video" media line.
        const char video[] = "m=video";
        const std::string &sdp = callInviteEvent.content.sdp;
        bool isVideo = std::search(sdp.cbegin(),
                                   sdp.cend(),
                                   std::cbegin(video),
                                   std::cend(video) - 1,  // -1: exclude the terminating NUL
                                   [](unsigned char c1, unsigned char c2) {
                                           return std::tolower(c1) == std::tolower(c2);
                                   }) != sdp.cend();

        nhlog::ui()->debug("WebRTC: call id: {} - incoming {} CallInvite from {}",
                           callInviteEvent.content.call_id,
                           (isVideo ? "video" : "voice"),
                           callInviteEvent.sender);

        if (callInviteEvent.content.call_id.empty())
                return;

        auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
        // Unsupported call (busy, group room, or video): decline immediately.
        if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
                emit newMessage(QString::fromStdString(callInviteEvent.room_id),
                                CallHangUp{callInviteEvent.content.call_id,
                                           0,
                                           CallHangUp::Reason::InviteTimeOut});
                return;
        }

        playRingtone("qrc:/media/media/ring.ogg", true);
        roomid_ = QString::fromStdString(callInviteEvent.room_id);
        callid_ = callInviteEvent.content.call_id;
        // Candidates may arrive before the user accepts; buffer them fresh.
        remoteICECandidates_.clear();

        // In a 1:1 room the caller is whichever member is not us.
        std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
        const RoomMember &caller =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(caller.user_id,
                          caller.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));

        auto dialog = new dialogs::AcceptCall(caller.user_id,
                                              caller.display_name,
                                              QString::fromStdString(roomInfo.name),
                                              QString::fromStdString(roomInfo.avatar_url),
                                              settings_,
                                              MainWindow::instance());
        // Capture the event by value: the dialog outlives this handler.
        connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
                MainWindow::instance()->hideOverlay();
                answerInvite(callInviteEvent.content);
        });
        connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
                MainWindow::instance()->hideOverlay();
                hangUp();
        });
        MainWindow::instance()->showSolidOverlayModal(dialog);
}
||||||
|
|
||||||
|
// Accepts a previously received call invite: initialises the session,
// applies STUN/TURN configuration, accepts the remote offer, and feeds in
// any ICE candidates buffered while the user was deciding.
void
CallManager::answerInvite(const CallInvite &invite)
{
        stopRingtone();
        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                hangUp();
                return;
        }

        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
        session_.setTurnServers(turnURIs_);

        if (!session_.acceptOffer(invite.sdp)) {
                emit ChatPage::instance()->showNotification("Problem setting up call.");
                hangUp();
                return;
        }
        // Flush candidates that arrived before the call was accepted.
        session_.acceptICECandidates(remoteICECandidates_);
        remoteICECandidates_.clear();
}
||||||
|
|
||||||
|
// Handles incoming m.call.candidates: candidates for the active call are
// passed straight to the session; candidates arriving before the user has
// accepted the invite are buffered.
void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
        // Ignore candidates we sent ourselves (echoed back via sync).
        if (callCandidatesEvent.sender == utils::localUser().toStdString())
                return;

        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
                           callCandidatesEvent.content.call_id,
                           callCandidatesEvent.sender);

        if (callid_ == callCandidatesEvent.content.call_id) {
                if (onActiveCall())
                        session_.acceptICECandidates(callCandidatesEvent.content.candidates);
                else {
                        // CallInvite has been received and we're awaiting localUser to accept or
                        // reject the call
                        for (const auto &c : callCandidatesEvent.content.candidates)
                                remoteICECandidates_.push_back(c);
                }
        }
}
||||||
|
|
||||||
|
// Handles incoming m.call.answer. An answer from our own user id while no
// call is active means another of our devices picked up; otherwise, an
// answer matching the active call id completes call setup.
void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
                           callAnswerEvent.content.call_id,
                           callAnswerEvent.sender);

        // Answered elsewhere: stop ringing and dismiss the accept dialog.
        if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
            callid_ == callAnswerEvent.content.call_id) {
                emit ChatPage::instance()->showNotification("Call answered on another device.");
                stopRingtone();
                MainWindow::instance()->hideOverlay();
                return;
        }

        if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
                stopRingtone();
                if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
                        emit ChatPage::instance()->showNotification("Problem setting up call.");
                        hangUp();
                }
        }
}
||||||
|
|
||||||
|
void |
||||||
|
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent) |
||||||
|
{ |
||||||
|
nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}", |
||||||
|
callHangUpEvent.content.call_id, |
||||||
|
callHangUpReasonString(callHangUpEvent.content.reason), |
||||||
|
callHangUpEvent.sender); |
||||||
|
|
||||||
|
if (callid_ == callHangUpEvent.content.call_id) { |
||||||
|
MainWindow::instance()->hideOverlay(); |
||||||
|
endCall(); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
void |
||||||
|
CallManager::generateCallID() |
||||||
|
{ |
||||||
|
using namespace std::chrono; |
||||||
|
uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count(); |
||||||
|
callid_ = "c" + std::to_string(ms); |
||||||
|
} |
||||||
|
|
||||||
|
void |
||||||
|
CallManager::clear() |
||||||
|
{ |
||||||
|
roomid_.clear(); |
||||||
|
callid_.clear(); |
||||||
|
remoteICECandidates_.clear(); |
||||||
|
} |
||||||
|
|
||||||
|
// Full local teardown of the current call: silence any ringtone, reset
// per-call state, then shut down the WebRTC session.
void
CallManager::endCall()
{
        stopRingtone();
        clear();
        session_.end();
}
||||||
|
|
||||||
|
// Drops cached TURN URIs and (re)starts the periodic credential fetch; the
// first attempt fires after a short 2 s delay.
void
CallManager::refreshTurnServer()
{
        turnURIs_.clear();
        turnServerTimer_.start(2000);
}
||||||
|
|
||||||
|
// Timer slot: asks the homeserver for TURN credentials. On failure the
// timer interval is shortened to 5 s for a quick retry; on success the
// turnServerRetrieved signal carries the response (handled in the ctor).
void
CallManager::retrieveTurnServer()
{
        http::client()->get_turn_server(
          [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
                  if (err) {
                          turnServerTimer_.setInterval(5000);
                          return;
                  }
                  // Emitted (queued) so the result is processed on the UI thread.
                  emit turnServerRetrieved(res);
          });
}
||||||
|
|
||||||
|
// Queues a ringtone resource for playback, optionally looping. Actual
// playback starts from the mediaStatusChanged handler once the media has
// loaded (see the ctor).
void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
        // One shared playlist: each call replaces the previous tone.
        static QMediaPlaylist playlist;
        playlist.clear();
        playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
                                        : QMediaPlaylist::CurrentItemOnce);
        playlist.addMedia(QUrl(ringtone));
        player_.setVolume(100);
        player_.setPlaylist(&playlist);
}
||||||
|
|
||||||
|
// Stops any ringing by detaching the player's playlist.
void
CallManager::stopRingtone()
{
        player_.setPlaylist(nullptr);
}
||||||
|
|
||||||
|
namespace { |
||||||
|
std::vector<std::string> |
||||||
|
getTurnURIs(const mtx::responses::TurnServer &turnServer) |
||||||
|
{ |
||||||
|
// gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp)
|
||||||
|
// where username and password are percent-encoded
|
||||||
|
std::vector<std::string> ret; |
||||||
|
for (const auto &uri : turnServer.uris) { |
||||||
|
if (auto c = uri.find(':'); c == std::string::npos) { |
||||||
|
nhlog::ui()->error("Invalid TURN server uri: {}", uri); |
||||||
|
continue; |
||||||
|
} else { |
||||||
|
std::string scheme = std::string(uri, 0, c); |
||||||
|
if (scheme != "turn" && scheme != "turns") { |
||||||
|
nhlog::ui()->error("Invalid TURN server uri: {}", uri); |
||||||
|
continue; |
||||||
|
} |
||||||
|
|
||||||
|
QString encodedUri = |
||||||
|
QString::fromStdString(scheme) + "://" + |
||||||
|
QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) + |
||||||
|
":" + |
||||||
|
QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) + |
||||||
|
"@" + QString::fromStdString(std::string(uri, ++c)); |
||||||
|
ret.push_back(encodedUri.toStdString()); |
||||||
|
} |
||||||
|
} |
||||||
|
return ret; |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,75 @@ |
|||||||
|
#pragma once |
||||||
|
|
||||||
|
#include <string> |
||||||
|
#include <vector> |
||||||
|
|
||||||
|
#include <QMediaPlayer> |
||||||
|
#include <QObject> |
||||||
|
#include <QSharedPointer> |
||||||
|
#include <QString> |
||||||
|
#include <QTimer> |
||||||
|
|
||||||
|
#include "mtx/events/collections.hpp" |
||||||
|
#include "mtx/events/voip.hpp" |
||||||
|
|
||||||
|
namespace mtx::responses { |
||||||
|
struct TurnServer; |
||||||
|
} |
||||||
|
|
||||||
|
class UserSettings; |
||||||
|
class WebRTCSession; |
||||||
|
|
||||||
|
// Coordinates Matrix VoIP signalling (m.call.* events) with the local
// WebRTC session: creating/answering invites, trickling ICE candidates,
// refreshing TURN credentials and playing ringtones.
class CallManager : public QObject
{
        Q_OBJECT

public:
        CallManager(QSharedPointer<UserSettings>);

        // Start an outgoing voice call in a 1:1 room.
        void sendInvite(const QString &roomid);
        // End the current call, notifying the remote side with the reason.
        void hangUp(
          mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
        // True while a call is being set up or is in progress.
        bool onActiveCall();
        // Invalidate cached TURN URIs and restart the credential refresh loop.
        void refreshTurnServer();

public slots:
        // Dispatch a timeline event from sync to the relevant VoIP handler.
        void syncEvent(const mtx::events::collections::TimelineEvents &event);

signals:
        // Outgoing call events, to be sent into the given room.
        void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
        // Fresh TURN credentials arrived from the homeserver.
        void turnServerRetrieved(const mtx::responses::TurnServer &);
        // Identity of the remote call party, for the UI (ActiveCallBar).
        void newCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private slots:
        void retrieveTurnServer();

private:
        WebRTCSession &session_;               // singleton GStreamer-backed session
        QString roomid_;                       // room of the current call
        std::string callid_;                   // empty when no call in flight
        const uint32_t timeoutms_ = 120000;    // invite lifetime (2 min)
        // ICE candidates received before the user accepted the invite.
        std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
        std::vector<std::string> turnURIs_;    // percent-encoded turn(s):// URIs
        QTimer turnServerTimer_;               // periodic credential refresh
        QSharedPointer<UserSettings> settings_;
        QMediaPlayer player_;                  // ringtone playback

        // Variant dispatch helper: calls handleEvent(RoomEvent<T>) when the
        // timeline event holds that alternative.
        template<typename T>
        bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
        void answerInvite(const mtx::events::msg::CallInvite &);
        void generateCallID();
        // Reset per-call state (room id, call id, buffered candidates).
        void clear();
        // Full local teardown: stop ringtone, clear state, end the session.
        void endCall();
        void playRingtone(const QString &ringtone, bool repeat);
        void stopRingtone();
};
@ -0,0 +1,697 @@ |
|||||||
|
#include <cctype> |
||||||
|
|
||||||
|
#include "Logging.h" |
||||||
|
#include "WebRTCSession.h" |
||||||
|
|
||||||
|
#ifdef GSTREAMER_AVAILABLE |
||||||
|
extern "C" |
||||||
|
{ |
||||||
|
#include "gst/gst.h" |
||||||
|
#include "gst/sdp/sdp.h" |
||||||
|
|
||||||
|
#define GST_USE_UNSTABLE_API |
||||||
|
#include "gst/webrtc/webrtc.h" |
||||||
|
} |
||||||
|
#endif |
||||||
|
|
||||||
|
Q_DECLARE_METATYPE(WebRTCSession::State) |
||||||
|
|
||||||
|
// Registers the State enum for queued signal delivery and mirrors every
// stateChanged emission into the session's own state_ via setState.
WebRTCSession::WebRTCSession()
  : QObject()
{
        qRegisterMetaType<WebRTCSession::State>();
        connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
}
||||||
|
|
||||||
|
// One-time GStreamer initialisation. Returns true when GStreamer started
// and every plugin required for voice calls is present; on failure the
// human-readable reason is stored in *errorMessage (if provided). Always
// returns false when built without GStreamer support.
bool
WebRTCSession::init(std::string *errorMessage)
{
#ifdef GSTREAMER_AVAILABLE
        // Already initialised on a previous call.
        if (initialised_)
                return true;

        GError *error = nullptr;
        if (!gst_init_check(nullptr, nullptr, &error)) {
                std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
                if (error) {
                        strError += error->message;
                        g_error_free(error);
                }
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
                return false;
        }

        gchar *version = gst_version_string();
        std::string gstVersion(version);
        g_free(version);
        nhlog::ui()->info("WebRTC: initialised " + gstVersion);

        // GStreamer Plugins:
        // Base: audioconvert, audioresample, opus, playback, volume
        // Good: autodetect, rtpmanager
        // Bad: dtls, srtp, webrtc
        // libnice [GLib]: nice
        // Verify each required plugin is in the registry; any miss flips
        // initialised_ back to false and is reported in strError.
        initialised_ = true;
        std::string strError = gstVersion + ": Missing plugins: ";
        const gchar *needed[] = {"audioconvert",
                                 "audioresample",
                                 "autodetect",
                                 "dtls",
                                 "nice",
                                 "opus",
                                 "playback",
                                 "rtpmanager",
                                 "srtp",
                                 "volume",
                                 "webrtc",
                                 nullptr};
        GstRegistry *registry = gst_registry_get();
        for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
                GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
                if (!plugin) {
                        strError += std::string(needed[i]) + " ";
                        initialised_ = false;
                        continue;
                }
                // find_plugin returns a new reference; release it.
                gst_object_unref(plugin);
        }

        if (!initialised_) {
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
        }
        return initialised_;
#else
        (void)errorMessage;
        return false;
#endif
}
||||||
|
|
||||||
|
#ifdef GSTREAMER_AVAILABLE |
||||||
|
namespace { |
||||||
|
bool isoffering_; |
||||||
|
std::string localsdp_; |
||||||
|
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_; |
||||||
|
|
||||||
|
// GStreamer bus watch callback: ends the session on end-of-stream or on a
// pipeline error. Returning TRUE keeps the watch installed.
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
        WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
        switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
                nhlog::ui()->error("WebRTC: end of stream");
                session->end();
                break;
        case GST_MESSAGE_ERROR:
                GError *error;
                gchar *debug;
                // Takes ownership of error and debug; both freed below.
                gst_message_parse_error(msg, &error, &debug);
                nhlog::ui()->error(
                  "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
                g_clear_error(&error);
                g_free(debug);
                session->end();
                break;
        default:
                break;
        }
        return TRUE;
}
||||||
|
|
||||||
|
// Parses an SDP string into a GstWebRTCSessionDescription of the given
// type. On success the returned description owns the parsed message; on
// parse failure returns nullptr.
GstWebRTCSessionDescription *
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                // Ownership of msg transfers to the session description.
                return gst_webrtc_session_description_new(type, msg);
        } else {
                nhlog::ui()->error("WebRTC: failed to parse remote session description");
                // BUGFIX: GstSDPMessage is a plain struct, not a GstObject —
                // releasing it with gst_object_unref() was invalid; it must be
                // freed with gst_sdp_message_free().
                gst_sdp_message_free(msg);
                return nullptr;
        }
}
||||||
|
|
||||||
|
// Promise callback for webrtcbin's create-offer / create-answer: applies the
// generated description locally and caches its SDP text in localsdp_ so it
// can be sent to the peer once ICE gathering completes.
void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
        // the reply struct carries either an "answer" or an "offer" field
        const GstStructure *reply = gst_promise_get_reply(promise);
        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
        GstWebRTCSessionDescription *gstsdp = nullptr;
        gst_structure_get(reply,
                          isAnswer ? "answer" : "offer",
                          GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
                          &gstsdp,
                          nullptr);
        // reply is owned by the promise; unref only after extracting the sdp
        gst_promise_unref(promise);
        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);

        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
        localsdp_ = std::string(sdp);
        g_free(sdp);
        gst_webrtc_session_description_free(gstsdp);

        nhlog::ui()->debug(
          "WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
}
||||||
|
|
||||||
|
// Connected to webrtcbin's on-negotiation-needed when we are the caller:
// kicks off offer creation; setLocalDescription runs when the offer is ready.
void
createOffer(GstElement *webrtc)
{
        // create-offer first, then set-local-description
        GstPromise *promise =
          gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}
||||||
|
|
||||||
|
// Promise callback chained after set-remote-description (callee side):
// discards the completed promise and requests an answer from webrtcbin.
void
createAnswer(GstPromise *promise, gpointer webrtc)
{
        // create-answer first, then set-local-description
        gst_promise_unref(promise);
        promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}
||||||
|
|
||||||
|
#if GST_CHECK_VERSION(1, 17, 0) |
||||||
|
// GStreamer >= 1.17 path: once ICE gathering completes, send the buffered
// local SDP and candidates to the peer and advance the session state.
void
iceGatheringStateChanged(GstElement *webrtc,
                         GParamSpec *pspec G_GNUC_UNUSED,
                         gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEGatheringState newState;
        g_object_get(webrtc, "ice-gathering-state", &newState, nullptr);
        if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
                nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete");
                if (isoffering_) {
                        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::OFFERSENT);
                } else {
                        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::ANSWERSENT);
                }
        }
}
||||||
|
|
||||||
|
#else |
||||||
|
|
||||||
|
// GStreamer < 1.17 fallback: fired by the 100ms debounce timer in
// addLocalICECandidate. Clears the timer id and sends the buffered
// offer/answer. Returning FALSE removes the (one-shot) timeout source.
gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *(guint *)(timerid) = 0;
        if (isoffering_) {
                emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
        } else {
                emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
        }
        return FALSE;
}
||||||
|
#endif |
||||||
|
|
||||||
|
// Connected to webrtcbin's on-ice-candidate: trickles candidates to the peer
// once the offer/answer is already out, otherwise buffers them in
// localcandidates_ until negotiation completes.
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                     guint mlineIndex,
                     gchar *candidate,
                     gpointer G_GNUC_UNUSED)
{
        nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);

        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
                // negotiation already sent: forward the candidate immediately
                emit WebRTCSession::instance().newICECandidate(
                  {"audio", (uint16_t)mlineIndex, candidate});
                return;
        }

        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
        // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.17.
        // Use a 100ms timeout in the meantime
#if !GST_CHECK_VERSION(1, 17, 0)
        // debounce: each new candidate restarts the 100ms quiet-period timer
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);

        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
#endif
}
||||||
|
|
||||||
|
// Tracks webrtcbin's ICE connection state: surfaces Checking as CONNECTING
// and Failed as ICEFAILED; all other transitions are ignored here.
void
iceConnectionStateChanged(GstElement *webrtc,
                          GParamSpec *pspec G_GNUC_UNUSED,
                          gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEConnectionState newState;
        g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
        switch (newState) {
        case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
                nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
                break;
        case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
                nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
                break;
        default:
                break;
        }
}
||||||
|
|
||||||
|
// decodebin pad-added handler: builds a playback chain
// (queue -> audioconvert -> audioresample -> autoaudiosink) for each decoded
// audio stream, links it to the new pad, and marks the session CONNECTED.
// Non-audio pads are ignored.
void
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        GstCaps *caps = gst_pad_get_current_caps(newpad);
        if (!caps)
                return;

        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
        gst_caps_unref(caps);

        GstPad *queuepad = nullptr;
        if (g_str_has_prefix(name, "audio")) {
                nhlog::ui()->debug("WebRTC: received incoming audio stream");
                GstElement *queue = gst_element_factory_make("queue", nullptr);
                GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
                GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
                // elements added to a running pipeline must be brought to its state
                gst_element_sync_state_with_parent(queue);
                gst_element_sync_state_with_parent(convert);
                gst_element_sync_state_with_parent(resample);
                gst_element_sync_state_with_parent(sink);
                gst_element_link_many(queue, convert, resample, sink, nullptr);
                queuepad = gst_element_get_static_pad(queue, "sink");
        }

        if (queuepad) {
                if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
                        nhlog::ui()->error("WebRTC: unable to link new pad");
                else {
                        // audio now flows end-to-end: the call is up
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::CONNECTED);
                }
                gst_object_unref(queuepad);
        }
}
||||||
|
|
||||||
|
// webrtcbin pad-added handler: attaches a decodebin to every new incoming
// (src) pad; the decodebin's own pad-added then builds the playback chain
// via linkNewPad.
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        // only source pads carry the remote media
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        nhlog::ui()->debug("WebRTC: received incoming stream");
        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: unable to link new pad");
        gst_object_unref(sinkpad);
}
||||||
|
|
||||||
|
// Case-insensitively locate the first occurrence of `name` inside `sdp`.
// Returns an iterator to the start of the match, or sdp.cend() if absent.
std::string::const_iterator
findName(const std::string &sdp, const std::string &name)
{
        const auto equalIgnoringCase = [](unsigned char lhs, unsigned char rhs) {
                return std::tolower(lhs) == std::tolower(rhs);
        };
        return std::search(
          sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), equalIgnoringCase);
}
||||||
|
|
||||||
|
// Extracts the dynamic RTP payload type for codec `name` from an SDP blob,
// e.g. "a=rtpmap:111 opus/48000/2" -> 111. Returns -1 (and logs) when the
// attribute is missing or the number cannot be parsed.
int
getPayloadType(const std::string &sdp, const std::string &name)
{
        // eg a=rtpmap:111 opus/48000/2
        auto e = findName(sdp, name);
        if (e == sdp.cend()) {
                nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
                return -1;
        }

        // scan backwards from the codec name for the ':' that precedes the number
        if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
                nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
                                   " payload type");
                return -1;
        } else {
                ++s;
                try {
                        // substring between ':' and the codec name, e.g. "111 "
                        return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
                } catch (...) {
                        nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
                                           " payload type");
                }
        }
        return -1;
}
||||||
|
|
||||||
|
} |
||||||
|
|
||||||
|
// Caller side: resets negotiation state and starts the send/receive pipeline;
// the actual offer is generated by webrtcbin's on-negotiation-needed.
bool
WebRTCSession::createOffer()
{
        isoffering_ = true;
        localsdp_.clear();
        localcandidates_.clear();
        return startPipeline(111); // a dynamic opus payload type
}
||||||
|
|
||||||
|
// Callee side: parses the remote offer, starts the pipeline with the offer's
// opus payload type, then applies the remote description (which chains into
// createAnswer). Returns false if a call is already active or parsing fails.
bool
WebRTCSession::acceptOffer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
        if (state_ != State::DISCONNECTED)
                return false;

        isoffering_ = false;
        localsdp_.clear();
        localcandidates_.clear();

        // mirror the caller's payload type so RTP numbers match
        int opusPayloadType = getPayloadType(sdp, "opus");
        if (opusPayloadType == -1)
                return false;

        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
        if (!offer)
                return false;

        if (!startPipeline(opusPayloadType)) {
                gst_webrtc_session_description_free(offer);
                return false;
        }

        // set-remote-description first, then create-answer
        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
        g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
        gst_webrtc_session_description_free(offer);
        return true;
}
||||||
|
|
||||||
|
// Caller side: applies the remote answer to our outstanding offer. Only valid
// in OFFERSENT; a malformed answer tears the session down.
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
        if (state_ != State::OFFERSENT)
                return false;

        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
        if (!answer) {
                end();
                return false;
        }

        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
        gst_webrtc_session_description_free(answer);
        return true;
}
||||||
|
|
||||||
|
// Feeds remote ICE candidates into webrtcbin. Candidates arriving before the
// pipeline exists (state < INITIATED) are silently dropped.
void
WebRTCSession::acceptICECandidates(
  const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
{
        if (state_ >= State::INITIATED) {
                for (const auto &c : candidates) {
                        nhlog::ui()->debug(
                          "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
                        g_signal_emit_by_name(
                          webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
                }
        }
}
||||||
|
|
||||||
|
// Builds the pipeline, configures STUN/TURN on webrtcbin, wires up all
// negotiation/ICE/stream callbacks, and sets the pipeline PLAYING.
// Signal hookup happens while the pipeline is still pre-PLAYING so no
// events are missed. Returns false (after cleanup) on any failure.
bool
WebRTCSession::startPipeline(int opusPayloadType)
{
        if (state_ != State::DISCONNECTED)
                return false;

        emit stateChanged(State::INITIATING);

        if (!createPipeline(opusPayloadType))
                return false;

        webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");

        if (!stunServer_.empty()) {
                nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
                g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
        }

        for (const auto &uri : turnServers_) {
                nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
                // add-turn-server returns its success flag through this out-param
                gboolean udata;
                g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
        }
        if (turnServers_.empty())
                nhlog::ui()->warn("WebRTC: no TURN server provided");

        // generate the offer when the pipeline goes to PLAYING
        if (isoffering_)
                g_signal_connect(
                  webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);

        // on-ice-candidate is emitted when a local ICE candidate has been gathered
        g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);

        // capture ICE failure
        g_signal_connect(
          webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);

        // incoming streams trigger pad-added
        gst_element_set_state(pipe_, GST_STATE_READY);
        g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);

#if GST_CHECK_VERSION(1, 17, 0)
        // capture ICE gathering completion
        g_signal_connect(
          webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
#endif
        // webrtcbin lifetime is the same as that of the pipeline
        gst_object_unref(webrtc_);

        // start the pipeline
        GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
                nhlog::ui()->error("WebRTC: unable to start pipeline");
                end();
                return false;
        }

        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
        gst_bus_add_watch(bus, newBusMessage, this);
        gst_object_unref(bus);
        emit stateChanged(State::INITIATED);
        return true;
}
||||||
|
|
||||||
|
// Assembles the audio send pipeline:
//   device source -> volume("srclevel") -> audioconvert -> audioresample
//   -> queue -> opusenc -> rtpopuspay -> queue -> capsfilter -> webrtcbin
// The capsfilter stamps the negotiated opus payload type onto the RTP caps.
// NOTE(review): the gst_element_factory_make() results are not null-checked;
// a missing plugin would presumably crash in gst_bin_add_many — confirm
// plugin availability is guaranteed by init() before this runs.
bool
WebRTCSession::createPipeline(int opusPayloadType)
{
        int nSources = audioSources_ ? g_list_length(audioSources_) : 0;
        if (nSources == 0) {
                nhlog::ui()->error("WebRTC: no audio sources");
                return false;
        }

        if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) {
                nhlog::ui()->error("WebRTC: invalid audio source index");
                return false;
        }

        GstElement *source = gst_device_create_element(
          GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr);
        // named "srclevel" so toggleMuteAudioSrc can find it later
        GstElement *volume = gst_element_factory_make("volume", "srclevel");
        GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement *resample = gst_element_factory_make("audioresample", nullptr);
        GstElement *queue1 = gst_element_factory_make("queue", nullptr);
        GstElement *opusenc = gst_element_factory_make("opusenc", nullptr);
        GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr);
        GstElement *queue2 = gst_element_factory_make("queue", nullptr);
        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);

        GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
                                               "media",
                                               G_TYPE_STRING,
                                               "audio",
                                               "encoding-name",
                                               G_TYPE_STRING,
                                               "OPUS",
                                               "payload",
                                               G_TYPE_INT,
                                               opusPayloadType,
                                               nullptr);
        g_object_set(capsfilter, "caps", rtpcaps, nullptr);
        gst_caps_unref(rtpcaps);

        GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
        g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);

        pipe_ = gst_pipeline_new(nullptr);
        // the pipeline takes ownership of all elements added here
        gst_bin_add_many(GST_BIN(pipe_),
                         source,
                         volume,
                         convert,
                         resample,
                         queue1,
                         opusenc,
                         rtp,
                         queue2,
                         capsfilter,
                         webrtcbin,
                         nullptr);

        if (!gst_element_link_many(source,
                                   volume,
                                   convert,
                                   resample,
                                   queue1,
                                   opusenc,
                                   rtp,
                                   queue2,
                                   capsfilter,
                                   webrtcbin,
                                   nullptr)) {
                nhlog::ui()->error("WebRTC: failed to link pipeline elements");
                end();
                return false;
        }
        return true;
}
||||||
|
|
||||||
|
// Toggles the microphone mute flag on the "srclevel" volume element.
// On success writes the new mute state into isMuted and returns true;
// returns false (leaving isMuted untouched) if no call is active.
bool
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
{
        if (state_ < State::INITIATED)
                return false;

        GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
        if (!srclevel)
                return false;

        gboolean muted;
        g_object_get(srclevel, "mute", &muted, nullptr);
        g_object_set(srclevel, "mute", !muted, nullptr);
        // gst_bin_get_by_name returned a new reference
        gst_object_unref(srclevel);
        isMuted = !muted;
        return true;
}
||||||
|
|
||||||
|
// Tears the call down: stops and releases the pipeline (which also destroys
// webrtcbin) and broadcasts DISCONNECTED exactly once. Safe to call when no
// call is active.
void
WebRTCSession::end()
{
        nhlog::ui()->debug("WebRTC: ending session");
        if (pipe_) {
                gst_element_set_state(pipe_, GST_STATE_NULL);
                gst_object_unref(pipe_);
                pipe_ = nullptr;
        }
        // webrtc_ was a borrowed pointer into the pipeline; just forget it
        webrtc_ = nullptr;
        if (state_ != State::DISCONNECTED)
                emit stateChanged(State::DISCONNECTED);
}
||||||
|
|
||||||
|
// Re-enumerates audio capture devices into audioSources_, replacing (and
// freeing) any previous list. The device monitor is created lazily once and
// filtered to raw audio sources.
// NOTE(review): the monitor is never gst_device_monitor_start()ed —
// presumably get_devices() probes providers on demand; confirm against the
// GstDeviceMonitor documentation.
void
WebRTCSession::refreshDevices()
{
        if (!initialised_)
                return;

        static GstDeviceMonitor *monitor = nullptr;
        if (!monitor) {
                monitor = gst_device_monitor_new();
                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
                gst_caps_unref(caps);
        }
        // drop the old device list before fetching a fresh one
        g_list_free_full(audioSources_, g_object_unref);
        audioSources_ = gst_device_monitor_get_devices(monitor);
}
||||||
|
|
||||||
|
// Returns the display names of all audio capture devices, refreshing the
// device list first. If defaultDevice is found it is moved to the front of
// both the returned list and audioSources_, keeping their indices aligned
// for setAudioSource().
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
{
        if (!initialised_)
                return {};

        refreshDevices();
        std::vector<std::string> ret;
        ret.reserve(g_list_length(audioSources_));
        for (GList *l = audioSources_; l != nullptr; l = l->next) {
                gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
                ret.emplace_back(name);
                g_free(name);
                if (ret.back() == defaultDevice) {
                        // move default device to top of the list
                        std::swap(audioSources_->data, l->data);
                        std::swap(ret.front(), ret.back());
                }
        }
        return ret;
}
||||||
|
#else |
||||||
|
|
||||||
|
// Stub for builds without GStreamer: calls cannot be placed.
bool
WebRTCSession::createOffer()
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: incoming offers are rejected.
bool
WebRTCSession::acceptOffer(const std::string &)
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: answers are rejected.
bool
WebRTCSession::acceptAnswer(const std::string &)
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: remote candidates are ignored.
void
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &)
{}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: no pipeline can be started.
bool
WebRTCSession::startPipeline(int)
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: no pipeline can be created.
bool
WebRTCSession::createPipeline(int)
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: mute toggle is unavailable.
bool
WebRTCSession::toggleMuteAudioSrc(bool &)
{
        return false;
}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: nothing to tear down.
void
WebRTCSession::end()
{}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: no devices to enumerate.
void
WebRTCSession::refreshDevices()
{}
||||||
|
|
||||||
|
// Stub for builds without GStreamer: reports no audio sources.
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &)
{
        return {};
}
||||||
|
|
||||||
|
#endif |
@ -0,0 +1,83 @@ |
|||||||
|
#pragma once |
||||||
|
|
||||||
|
#include <string> |
||||||
|
#include <vector> |
||||||
|
|
||||||
|
#include <QObject> |
||||||
|
|
||||||
|
#include "mtx/events/voip.hpp" |
||||||
|
|
||||||
|
typedef struct _GList GList; |
||||||
|
typedef struct _GstElement GstElement; |
||||||
|
|
||||||
|
// Singleton owning the GStreamer WebRTC pipeline for 1:1 voice calls.
// Drives SDP offer/answer negotiation and ICE candidate exchange, and
// reports progress through the stateChanged signal.
class WebRTCSession : public QObject
{
        Q_OBJECT

public:
        // Call lifecycle; comparison order matters (callers use >=/< checks).
        enum class State
        {
                DISCONNECTED, // no call in progress
                ICEFAILED,    // ICE connectivity checks failed
                INITIATING,   // pipeline being constructed
                INITIATED,    // pipeline running, negotiating
                OFFERSENT,    // local offer delivered to peer
                ANSWERSENT,   // local answer delivered to peer
                CONNECTING,   // ICE connectivity checks in progress
                CONNECTED     // media flowing
        };

        // Meyers singleton: one session object per application.
        static WebRTCSession &instance()
        {
                static WebRTCSession instance;
                return instance;
        }

        // One-time GStreamer initialisation; on failure optionally reports
        // the reason through errorMessage.
        bool init(std::string *errorMessage = nullptr);
        State state() const { return state_; }

        bool createOffer();
        bool acceptOffer(const std::string &sdp);
        bool acceptAnswer(const std::string &sdp);
        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);

        bool toggleMuteAudioSrc(bool &isMuted);
        void end();

        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
        void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }

        std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
        // Index into the list previously returned by getAudioSourceNames().
        void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }

signals:
        void offerCreated(const std::string &sdp,
                          const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
        void answerCreated(const std::string &sdp,
                           const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
        void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
        void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt

private slots:
        void setState(State state) { state_ = state; }

private:
        WebRTCSession();

        bool initialised_ = false;
        State state_ = State::DISCONNECTED;
        GstElement *pipe_ = nullptr;   // owning ref to the pipeline
        GstElement *webrtc_ = nullptr; // borrowed: owned by pipe_
        std::string stunServer_;
        std::vector<std::string> turnServers_;
        GList *audioSources_ = nullptr; // GstDevice list from the monitor
        int audioSourceIndex_ = -1;     // -1 until setAudioSource() is called

        bool startPipeline(int opusPayloadType);
        bool createPipeline(int opusPayloadType);
        void refreshDevices();

public:
        // non-copyable singleton
        WebRTCSession(WebRTCSession const &) = delete;
        void operator=(WebRTCSession const &) = delete;
};
@ -0,0 +1,135 @@ |
|||||||
|
#include <QComboBox> |
||||||
|
#include <QLabel> |
||||||
|
#include <QPushButton> |
||||||
|
#include <QString> |
||||||
|
#include <QVBoxLayout> |
||||||
|
|
||||||
|
#include "ChatPage.h" |
||||||
|
#include "Config.h" |
||||||
|
#include "UserSettingsPage.h" |
||||||
|
#include "Utils.h" |
||||||
|
#include "WebRTCSession.h" |
||||||
|
#include "dialogs/AcceptCall.h" |
||||||
|
#include "ui/Avatar.h" |
||||||
|
|
||||||
|
namespace dialogs { |
||||||
|
|
||||||
|
// Modal dialog shown for an incoming voice call: displays caller identity and
// avatar, lets the user pick a microphone, and emits accept()/reject().
// Closes itself immediately (emitting nothing) if WebRTC cannot be
// initialised or no audio sources exist.
AcceptCall::AcceptCall(const QString &caller,
                       const QString &displayName,
                       const QString &roomName,
                       const QString &avatarUrl,
                       QSharedPointer<UserSettings> settings,
                       QWidget *parent)
  : QWidget(parent)
{
        // bail out early if the call could never be answered
        std::string errorMessage;
        if (!WebRTCSession::instance().init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                emit close();
                return;
        }
        // puts the user's preferred device first in the list
        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
          settings->defaultAudioSource().toStdString());
        if (audioDevices_.empty()) {
                emit ChatPage::instance()->showNotification(
                  "Incoming call: No audio sources found.");
                emit close();
                return;
        }

        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH);
        setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        // f keeps the default point size as the base for scaled labels below
        QFont f;
        f.setPointSizeF(f.pointSizeF());

        QFont labelFont;
        labelFont.setWeight(QFont::Medium);

        // only show a separate display-name line when it adds information
        QLabel *displayNameLabel = nullptr;
        if (!displayName.isEmpty() && displayName != caller) {
                displayNameLabel = new QLabel(displayName, this);
                labelFont.setPointSizeF(f.pointSizeF() * 2);
                displayNameLabel->setFont(labelFont);
                displayNameLabel->setAlignment(Qt::AlignCenter);
        }

        QLabel *callerLabel = new QLabel(caller, this);
        labelFont.setPointSizeF(f.pointSizeF() * 1.2);
        callerLabel->setFont(labelFont);
        callerLabel->setAlignment(Qt::AlignCenter);

        auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
        if (!avatarUrl.isEmpty())
                avatar->setImage(avatarUrl);
        else
                avatar->setLetter(utils::firstChar(roomName));

        const int iconSize = 22;
        QLabel *callTypeIndicator = new QLabel(this);
        callTypeIndicator->setPixmap(
          QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));

        QLabel *callTypeLabel = new QLabel("Voice Call", this);
        labelFont.setPointSizeF(f.pointSizeF() * 1.1);
        callTypeLabel->setFont(labelFont);
        callTypeLabel->setAlignment(Qt::AlignCenter);

        auto buttonLayout = new QHBoxLayout;
        buttonLayout->setSpacing(18);
        acceptBtn_ = new QPushButton(tr("Accept"), this);
        acceptBtn_->setDefault(true);
        acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
        acceptBtn_->setIconSize(QSize(iconSize, iconSize));

        rejectBtn_ = new QPushButton(tr("Reject"), this);
        rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png"));
        rejectBtn_->setIconSize(QSize(iconSize, iconSize));
        buttonLayout->addWidget(acceptBtn_);
        buttonLayout->addWidget(rejectBtn_);

        // microphone picker row
        auto deviceLayout = new QHBoxLayout;
        auto audioLabel = new QLabel(this);
        audioLabel->setPixmap(
          QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));

        auto deviceList = new QComboBox(this);
        for (const auto &d : audioDevices_)
                deviceList->addItem(QString::fromStdString(d));

        deviceLayout->addStretch();
        deviceLayout->addWidget(audioLabel);
        deviceLayout->addWidget(deviceList);

        if (displayNameLabel)
                layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
        layout->addWidget(callerLabel, 0, Qt::AlignCenter);
        layout->addWidget(avatar, 0, Qt::AlignCenter);
        layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
        layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
        layout->addLayout(buttonLayout);
        layout->addLayout(deviceLayout);

        // accepting records the chosen device both for this call and as the
        // new default, then signals the owner and closes the dialog
        connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
                settings->setDefaultAudioSource(
                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
                emit accept();
                emit close();
        });
        connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
                emit reject();
                emit close();
        });
}
||||||
|
|
||||||
|
} |
@ -0,0 +1,37 @@ |
|||||||
|
#pragma once |
||||||
|
|
||||||
|
#include <string> |
||||||
|
#include <vector> |
||||||
|
|
||||||
|
#include <QSharedPointer> |
||||||
|
#include <QWidget> |
||||||
|
|
||||||
|
class QPushButton; |
||||||
|
class QString; |
||||||
|
class UserSettings; |
||||||
|
|
||||||
|
namespace dialogs { |
||||||
|
|
||||||
|
// Dialog shown for an incoming voice call; emits accept() or reject()
// according to the user's choice and deletes itself on close.
class AcceptCall : public QWidget
{
        Q_OBJECT

public:
        AcceptCall(const QString &caller,
                   const QString &displayName,
                   const QString &roomName,
                   const QString &avatarUrl,
                   QSharedPointer<UserSettings> settings,
                   QWidget *parent = nullptr);

signals:
        void accept();
        void reject();

private:
        QPushButton *acceptBtn_;
        QPushButton *rejectBtn_;
        // available microphone names; preferred device sorted first
        std::vector<std::string> audioDevices_;
};
||||||
|
|
||||||
|
} |
@ -0,0 +1,104 @@ |
|||||||
|
#include <QComboBox> |
||||||
|
#include <QLabel> |
||||||
|
#include <QPushButton> |
||||||
|
#include <QString> |
||||||
|
#include <QVBoxLayout> |
||||||
|
|
||||||
|
#include "ChatPage.h" |
||||||
|
#include "Config.h" |
||||||
|
#include "UserSettingsPage.h" |
||||||
|
#include "Utils.h" |
||||||
|
#include "WebRTCSession.h" |
||||||
|
#include "dialogs/PlaceCall.h" |
||||||
|
#include "ui/Avatar.h" |
||||||
|
|
||||||
|
namespace dialogs { |
||||||
|
|
||||||
|
// Modal confirmation dialog for starting a voice call.
//
// @param callee      Matrix ID of the user being called (shown if no display name).
// @param displayName Callee's display name; preferred over `callee` in the prompt.
// @param roomName    Room name, used for the avatar fallback letter.
// @param avatarUrl   Room/user avatar URL; empty means use the letter fallback.
// @param settings    User settings; read for the default audio source and
//                    updated with the chosen device when the call is placed.
// @param parent      Parent widget.
//
// The constructor eagerly initialises the WebRTC session and enumerates audio
// capture devices. On failure it reports the error through ChatPage's
// notification signal and closes itself immediately (WA_DeleteOnClose is only
// set after these early exits, so no self-deletion happens mid-construction).
PlaceCall::PlaceCall(const QString &callee,
                     const QString &displayName,
                     const QString &roomName,
                     const QString &avatarUrl,
                     QSharedPointer<UserSettings> settings,
                     QWidget *parent)
  : QWidget(parent)
{
        std::string errorMessage;
        if (!WebRTCSession::instance().init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                // close() is a QWidget slot, not a signal — call it directly.
                close();
                return;
        }

        // Put the previously used device (if any) first so it becomes the
        // combo box's default selection.
        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
          settings->defaultAudioSource().toStdString());
        if (audioDevices_.empty()) {
                emit ChatPage::instance()->showNotification(tr("No audio sources found."));
                close();
                return;
        }

        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        auto buttonLayout = new QHBoxLayout;
        buttonLayout->setSpacing(15);
        buttonLayout->setMargin(0);

        // Scale the avatar relative to the default font size.
        QFont f;
        auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
        if (!avatarUrl.isEmpty())
                avatar->setImage(avatarUrl);
        else
                avatar->setLetter(utils::firstChar(roomName));

        const int iconSize = 18;
        voiceBtn_ = new QPushButton(tr("Voice"), this);
        voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
        voiceBtn_->setIconSize(QSize(iconSize, iconSize));
        voiceBtn_->setDefault(true);
        cancelBtn_ = new QPushButton(tr("Cancel"), this);

        buttonLayout->addWidget(avatar);
        buttonLayout->addStretch();
        buttonLayout->addWidget(voiceBtn_);
        buttonLayout->addWidget(cancelBtn_);

        QString name   = displayName.isEmpty() ? callee : displayName;
        QLabel *label  = new QLabel(tr("Place a call to %1?").arg(name), this);

        auto deviceLayout = new QHBoxLayout;
        auto audioLabel   = new QLabel(this);
        // Explicit cast: iconSize * 1.2 is a double; make the narrowing visible.
        const int micIconSize = static_cast<int>(iconSize * 1.2);
        audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
                                .pixmap(QSize(micIconSize, micIconSize)));

        auto deviceList = new QComboBox(this);
        for (const auto &d : audioDevices_)
                deviceList->addItem(QString::fromStdString(d));

        deviceLayout->addStretch();
        deviceLayout->addWidget(audioLabel);
        deviceLayout->addWidget(deviceList);

        layout->addWidget(label);
        layout->addLayout(buttonLayout);
        layout->addLayout(deviceLayout);

        // Placing the call: remember the chosen device as the new default,
        // select it in the session, then notify the caller and close.
        // deviceList->currentIndex() is a valid index into audioDevices_
        // because the device list is non-empty (checked above).
        connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
                settings->setDefaultAudioSource(
                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
                emit voice();
                close();
        });
        connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
                emit cancel();
                close();
        });
}
||||||
|
|
||||||
|
} |
@ -0,0 +1,37 @@ |
|||||||
|
#pragma once |
||||||
|
|
||||||
|
#include <string> |
||||||
|
#include <vector> |
||||||
|
|
||||||
|
#include <QSharedPointer> |
||||||
|
#include <QWidget> |
||||||
|
|
||||||
|
class QPushButton; |
||||||
|
class QString; |
||||||
|
class UserSettings; |
||||||
|
|
||||||
|
namespace dialogs { |
||||||
|
|
||||||
|
// Modal dialog asking the user to confirm placing a voice call and to pick
// an audio input device. Emits voice() when the call is confirmed and
// cancel() when it is dismissed; in both cases the dialog closes (and
// deletes) itself — see the constructor, which sets Qt::WA_DeleteOnClose.
class PlaceCall : public QWidget
{
        Q_OBJECT

public:
        // callee:      Matrix ID of the user being called.
        // displayName: callee's display name; used in the prompt when non-empty.
        // roomName:    room name, used for the avatar letter fallback.
        // avatarUrl:   avatar to show; empty selects the letter fallback.
        // settings:    read for the default audio source; updated on confirm.
        PlaceCall(const QString &callee,
                  const QString &displayName,
                  const QString &roomName,
                  const QString &avatarUrl,
                  QSharedPointer<UserSettings> settings,
                  QWidget *parent = nullptr);

signals:
        // Emitted when the user confirms placing the voice call.
        void voice();
        // Emitted when the user dismisses the dialog.
        void cancel();

private:
        QPushButton *voiceBtn_;
        QPushButton *cancelBtn_;
        // Audio capture device names, as enumerated by WebRTCSession;
        // indices match the device combo box built in the constructor.
        std::vector<std::string> audioDevices_;
};
||||||
|
|
||||||
|
} |
Loading…
Reference in new issue