Merge pull request #237 from trilene/voip

Support voice calls
Branch: master
DeepBlueV7.X committed 4 years ago (via GitHub)
commit 3fece53eb7
39 changed files (changed line counts in parentheses; BIN = binary):
  1. CMakeLists.txt (20)
  2. io.github.NhekoReborn.Nheko.json (6)
  3. resources/icons/ui/end-call.png (BIN)
  4. resources/icons/ui/microphone-mute.png (BIN)
  5. resources/icons/ui/microphone-unmute.png (BIN)
  6. resources/icons/ui/place-call.png (BIN)
  7. resources/langs/nheko_en.ts (45)
  8. resources/media/README.txt (5)
  9. resources/media/callend.ogg (BIN)
  10. resources/media/ring.ogg (BIN)
  11. resources/media/ringback.ogg (BIN)
  12. resources/qml/delegates/MessageDelegate.qml (18)
  13. resources/res.qrc (10)
  14. src/ActiveCallBar.cpp (160)
  15. src/ActiveCallBar.h (40)
  16. src/Cache.cpp (3)
  17. src/CallManager.cpp (458)
  18. src/CallManager.h (75)
  19. src/ChatPage.cpp (58)
  20. src/ChatPage.h (8)
  21. src/EventAccessors.cpp (31)
  22. src/EventAccessors.h (3)
  23. src/MainWindow.cpp (23)
  24. src/TextInputWidget.cpp (31)
  25. src/TextInputWidget.h (4)
  26. src/UserSettingsPage.cpp (48)
  27. src/UserSettingsPage.h (14)
  28. src/Utils.cpp (14)
  29. src/Utils.h (27)
  30. src/WebRTCSession.cpp (697)
  31. src/WebRTCSession.h (83)
  32. src/dialogs/AcceptCall.cpp (135)
  33. src/dialogs/AcceptCall.h (37)
  34. src/dialogs/PlaceCall.cpp (104)
  35. src/dialogs/PlaceCall.h (37)
  36. src/timeline/TimelineModel.cpp (191)
  37. src/timeline/TimelineModel.h (22)
  38. src/timeline/TimelineViewManager.cpp (64)
  39. src/timeline/TimelineViewManager.h (11)

@@ -225,6 +225,7 @@ configure_file(cmake/nheko.h config/nheko.h)
#
set(SRC_FILES
# Dialogs
src/dialogs/AcceptCall.cpp
src/dialogs/CreateRoom.cpp
src/dialogs/FallbackAuth.cpp
src/dialogs/ImageOverlay.cpp
@@ -233,6 +234,7 @@ set(SRC_FILES
src/dialogs/LeaveRoom.cpp
src/dialogs/Logout.cpp
src/dialogs/MemberList.cpp
src/dialogs/PlaceCall.cpp
src/dialogs/PreviewUploadOverlay.cpp
src/dialogs/ReCaptcha.cpp
src/dialogs/ReadReceipts.cpp
@@ -276,9 +278,11 @@ set(SRC_FILES
src/ui/Theme.cpp
src/ui/ThemeManager.cpp
src/ActiveCallBar.cpp
src/AvatarProvider.cpp
src/BlurhashProvider.cpp
src/Cache.cpp
src/CallManager.cpp
src/ChatPage.cpp
src/ColorImageProvider.cpp
src/CommunitiesList.cpp
@@ -304,6 +308,7 @@ set(SRC_FILES
src/UserInfoWidget.cpp
src/UserSettingsPage.cpp
src/Utils.cpp
src/WebRTCSession.cpp
src/WelcomePage.cpp
src/popups/PopupItem.cpp
src/popups/SuggestionsPopup.cpp
@@ -335,7 +340,7 @@ if(USE_BUNDLED_MTXCLIENT)
FetchContent_Declare(
MatrixClient
GIT_REPOSITORY https://github.com/Nheko-Reborn/mtxclient.git
-GIT_TAG eddd95a896fad0c51fc800741d82bbc43fc6d41e
+GIT_TAG 744018c86a8094acbda9821d6d7b5a890d4aac47
)
FetchContent_MakeAvailable(MatrixClient)
else()
@@ -421,6 +426,9 @@ else()
find_package(Tweeny REQUIRED)
endif()
include(FindPkgConfig)
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.14 gstreamer-webrtc-1.0>=1.14)
# single instance functionality
set(QAPPLICATION_CLASS QApplication CACHE STRING "Inheritance class for SingleApplication")
add_subdirectory(third_party/SingleApplication-3.1.3.1/)
@@ -429,6 +437,7 @@ feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAG
qt5_wrap_cpp(MOC_HEADERS
# Dialogs
src/dialogs/AcceptCall.h
src/dialogs/CreateRoom.h
src/dialogs/FallbackAuth.h
src/dialogs/ImageOverlay.h
@@ -437,6 +446,7 @@ qt5_wrap_cpp(MOC_HEADERS
src/dialogs/LeaveRoom.h
src/dialogs/Logout.h
src/dialogs/MemberList.h
src/dialogs/PlaceCall.h
src/dialogs/PreviewUploadOverlay.h
src/dialogs/RawMessage.h
src/dialogs/ReCaptcha.h
@@ -480,9 +490,11 @@ qt5_wrap_cpp(MOC_HEADERS
src/notifications/Manager.h
src/ActiveCallBar.h
src/AvatarProvider.h
src/BlurhashProvider.h
src/Cache_p.h
src/CallManager.h
src/ChatPage.h
src/CommunitiesList.h
src/CommunitiesListItem.h
@@ -502,6 +514,7 @@ qt5_wrap_cpp(MOC_HEADERS
src/TrayIcon.h
src/UserInfoWidget.h
src/UserSettingsPage.h
src/WebRTCSession.h
src/WelcomePage.h
src/popups/PopupItem.h
src/popups/SuggestionsPopup.h
@@ -590,6 +603,11 @@ target_precompile_headers(nheko
)
endif()
if (TARGET PkgConfig::GSTREAMER)
target_link_libraries(nheko PRIVATE PkgConfig::GSTREAMER)
target_compile_definitions(nheko PRIVATE GSTREAMER_AVAILABLE)
endif()
if(MSVC)
target_link_libraries(nheko PRIVATE ntdll)
endif()

@@ -146,9 +146,9 @@
"name": "mtxclient",
"sources": [
{
"sha256": "6334bb71821a0fde54fe24f02ad393cdb6836633557ffdd239b29c5d5108daaf",
"type": "archive",
"url": "https://github.com/Nheko-Reborn/mtxclient/archive/eddd95a896fad0c51fc800741d82bbc43fc6d41e.tar.gz"
"commit": "744018c86a8094acbda9821d6d7b5a890d4aac47",
"type": "git",
"url": "https://github.com/Nheko-Reborn/mtxclient.git"
}
]
},

Binary image files added (in file-list order): end-call.png (643 B), microphone-mute.png (1.1 KiB), microphone-unmute.png (1.1 KiB), place-call.png (759 B)

@@ -404,6 +404,21 @@ Example: https://server.my:8787</translation>
<source>%1 created and configured room: %2</source>
<translation>%1 created and configured room: %2</translation>
</message>
<message>
<location line="+6"/>
<source>%1 placed a %2 call.</source>
<translation>%1 placed a %2 call.</translation>
</message>
<message>
<location line="+6"/>
<source>%1 answered the call.</source>
<translation>%1 answered the call.</translation>
</message>
<message>
<location line="+6"/>
<source>%1 ended the call.</source>
<translation>%1 ended the call.</translation>
</message>
</context>
<context>
<name>Placeholder</name>
@@ -1796,6 +1811,36 @@ Media size: %2
<source>%1 sent an encrypted message</source>
<translation>%1 sent an encrypted message</translation>
</message>
<message>
<location line="+5"/>
<source>You placed a call</source>
<translation>You placed a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 placed a call</source>
<translation>%1 placed a call</translation>
</message>
<message>
<location line="+5"/>
<source>You answered a call</source>
<translation>You answered a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 answered a call</source>
<translation>%1 answered a call</translation>
</message>
<message>
<location line="+5"/>
<source>You ended a call</source>
<translation>You ended a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 ended a call</source>
<translation>%1 ended a call</translation>
</message>
</context>
<context>
<name>popups::UserMentions</name>

@@ -0,0 +1,5 @@
The media files below were obtained from https://github.com/matrix-org/matrix-react-sdk/tree/develop/res/media
callend.ogg
ringback.ogg
ring.ogg

Binary audio files added (in file-list order): callend.ogg, ring.ogg, ringback.ogg

@@ -90,6 +90,24 @@ Item {
text: qsTr("%1 created and configured room: %2").arg(model.data.userName).arg(model.data.roomId)
}
}
DelegateChoice {
roleValue: MtxEvent.CallInvite
NoticeMessage {
text: qsTr("%1 placed a %2 call.").arg(model.data.userName).arg(model.data.callType)
}
}
DelegateChoice {
roleValue: MtxEvent.CallAnswer
NoticeMessage {
text: qsTr("%1 answered the call.").arg(model.data.userName)
}
}
DelegateChoice {
roleValue: MtxEvent.CallHangUp
NoticeMessage {
text: qsTr("%1 ended the call.").arg(model.data.userName)
}
}
DelegateChoice {
// TODO: make a more complex formatter for the power levels.
roleValue: MtxEvent.PowerLevels

@@ -70,6 +70,11 @@
<file>icons/ui/mail-reply.png</file>
<file>icons/ui/place-call.png</file>
<file>icons/ui/end-call.png</file>
<file>icons/ui/microphone-mute.png</file>
<file>icons/ui/microphone-unmute.png</file>
<file>icons/emoji-categories/people.png</file>
<file>icons/emoji-categories/people@2x.png</file>
<file>icons/emoji-categories/nature.png</file>
@@ -136,4 +141,9 @@
<file>qml/delegates/Placeholder.qml</file>
<file>qml/delegates/Reply.qml</file>
</qresource>
<qresource prefix="/media">
<file>media/ring.ogg</file>
<file>media/ringback.ogg</file>
<file>media/callend.ogg</file>
</qresource>
</RCC>

@@ -0,0 +1,160 @@
#include <cstdio>
#include <QDateTime>
#include <QHBoxLayout>
#include <QIcon>
#include <QLabel>
#include <QString>
#include <QTimer>
#include "ActiveCallBar.h"
#include "ChatPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/Avatar.h"
#include "ui/FlatButton.h"
ActiveCallBar::ActiveCallBar(QWidget *parent)
: QWidget(parent)
{
setAutoFillBackground(true);
auto p = palette();
p.setColor(backgroundRole(), QColor(46, 204, 113));
setPalette(p);
QFont f;
f.setPointSizeF(f.pointSizeF());
const int fontHeight = QFontMetrics(f).height();
const int widgetMargin = fontHeight / 3;
const int contentHeight = fontHeight * 3;
setFixedHeight(contentHeight + widgetMargin);
layout_ = new QHBoxLayout(this);
layout_->setSpacing(widgetMargin);
layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
QFont labelFont;
labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
labelFont.setWeight(QFont::Medium);
avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5);
callPartyLabel_ = new QLabel(this);
callPartyLabel_->setFont(labelFont);
stateLabel_ = new QLabel(this);
stateLabel_->setFont(labelFont);
durationLabel_ = new QLabel(this);
durationLabel_->setFont(labelFont);
durationLabel_->hide();
muteBtn_ = new FlatButton(this);
setMuteIcon(false);
muteBtn_->setFixedSize(buttonSize_, buttonSize_);
muteBtn_->setCornerRadius(buttonSize_ / 2);
connect(muteBtn_, &FlatButton::clicked, this, [this]() {
if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
setMuteIcon(muted_);
});
layout_->addWidget(avatar_, 0, Qt::AlignLeft);
layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
layout_->addStretch();
layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
layout_->addSpacing(18);
timer_ = new QTimer(this);
connect(timer_, &QTimer::timeout, this, [this]() {
auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
int s = seconds % 60;
int m = (seconds / 60) % 60;
int h = seconds / 3600;
char buf[12];
if (h)
snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
else
snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
durationLabel_->setText(buf);
});
connect(
&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}
void
ActiveCallBar::setMuteIcon(bool muted)
{
QIcon icon;
if (muted) {
muteBtn_->setToolTip("Unmute Mic");
icon.addFile(":/icons/icons/ui/microphone-unmute.png");
} else {
muteBtn_->setToolTip("Mute Mic");
icon.addFile(":/icons/icons/ui/microphone-mute.png");
}
muteBtn_->setIcon(icon);
muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_));
}
void
ActiveCallBar::setCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl)
{
callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " ");
if (!avatarUrl.isEmpty())
avatar_->setImage(avatarUrl);
else
avatar_->setLetter(utils::firstChar(roomName));
}
void
ActiveCallBar::update(WebRTCSession::State state)
{
switch (state) {
case WebRTCSession::State::INITIATING:
show();
stateLabel_->setText("Initiating call...");
break;
case WebRTCSession::State::INITIATED:
show();
stateLabel_->setText("Call initiated...");
break;
case WebRTCSession::State::OFFERSENT:
show();
stateLabel_->setText("Calling...");
break;
case WebRTCSession::State::CONNECTING:
show();
stateLabel_->setText("Connecting...");
break;
case WebRTCSession::State::CONNECTED:
show();
callStartTime_ = QDateTime::currentSecsSinceEpoch();
timer_->start(1000);
stateLabel_->setPixmap(
QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
durationLabel_->setText("00:00");
durationLabel_->show();
break;
case WebRTCSession::State::ICEFAILED:
case WebRTCSession::State::DISCONNECTED:
hide();
timer_->stop();
callPartyLabel_->setText(QString());
stateLabel_->setText(QString());
durationLabel_->setText(QString());
durationLabel_->hide();
setMuteIcon(false);
break;
default:
break;
}
}

@@ -0,0 +1,40 @@
#pragma once
#include <QWidget>
#include "WebRTCSession.h"
class QHBoxLayout;
class QLabel;
class QTimer;
class Avatar;
class FlatButton;
class ActiveCallBar : public QWidget
{
Q_OBJECT
public:
ActiveCallBar(QWidget *parent = nullptr);
public slots:
void update(WebRTCSession::State);
void setCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl);
private:
QHBoxLayout *layout_ = nullptr;
Avatar *avatar_ = nullptr;
QLabel *callPartyLabel_ = nullptr;
QLabel *stateLabel_ = nullptr;
QLabel *durationLabel_ = nullptr;
FlatButton *muteBtn_ = nullptr;
int buttonSize_ = 22;
bool muted_ = false;
qint64 callStartTime_ = 0;
QTimer *timer_ = nullptr;
void setMuteIcon(bool muted);
};

@@ -1364,6 +1364,9 @@ Cache::getLastMessageInfo(lmdb::txn &txn, const std::string &room_id)
if (!(obj["event"]["type"] == "m.room.message" ||
obj["event"]["type"] == "m.sticker" ||
obj["event"]["type"] == "m.call.invite" ||
obj["event"]["type"] == "m.call.answer" ||
obj["event"]["type"] == "m.call.hangup" ||
obj["event"]["type"] == "m.room.encrypted"))
continue;

@@ -0,0 +1,458 @@
#include <algorithm>
#include <cctype>
#include <chrono>
#include <cstdint>
#include <QMediaPlaylist>
#include <QUrl>
#include "Cache.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MainWindow.h"
#include "MatrixClient.h"
#include "UserSettingsPage.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "mtx/responses/turn_server.hpp"
Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>)
Q_DECLARE_METATYPE(mtx::events::msg::CallCandidates::Candidate)
Q_DECLARE_METATYPE(mtx::responses::TurnServer)
using namespace mtx::events;
using namespace mtx::events::msg;
// https://github.com/vector-im/riot-web/issues/10173
#define STUN_SERVER "stun://turn.matrix.org:3478"
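// fallback only: applied when the user opts in via "Allow fallback call assist server"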
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer);
}
CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
: QObject()
, session_(WebRTCSession::instance())
, turnServerTimer_(this)
, settings_(userSettings)
{
qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
qRegisterMetaType<mtx::responses::TurnServer>();
connect(
&session_,
&WebRTCSession::offerCreated,
this,
[this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
QTimer::singleShot(timeoutms_, this, [this]() {
if (session_.state() == WebRTCSession::State::OFFERSENT) {
hangUp(CallHangUp::Reason::InviteTimeOut);
emit ChatPage::instance()->showNotification(
"The remote side failed to pick up.");
}
});
});
connect(
&session_,
&WebRTCSession::answerCreated,
this,
[this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
});
connect(&session_,
&WebRTCSession::newICECandidate,
this,
[this](const CallCandidates::Candidate &candidate) {
nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
});
connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
connect(this,
&CallManager::turnServerRetrieved,
this,
[this](const mtx::responses::TurnServer &res) {
nhlog::net()->info("TURN server(s) retrieved from homeserver:");
nhlog::net()->info("username: {}", res.username);
nhlog::net()->info("ttl: {} seconds", res.ttl);
for (const auto &u : res.uris)
nhlog::net()->info("uri: {}", u);
// Request new credentials close to expiry
// See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
turnURIs_ = getTurnURIs(res);
uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
if (res.ttl < 3600)
nhlog::net()->warn("Setting ttl to 1 hour");
turnServerTimer_.setInterval(ttl * 1000 * 0.9);
});
connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
switch (state) {
case WebRTCSession::State::DISCONNECTED:
playRingtone("qrc:/media/media/callend.ogg", false);
clear();
break;
case WebRTCSession::State::ICEFAILED: {
QString error("Call connection failed.");
if (turnURIs_.empty())
error += " Your homeserver has no configured TURN server.";
emit ChatPage::instance()->showNotification(error);
hangUp(CallHangUp::Reason::ICEFailed);
break;
}
default:
break;
}
});
connect(&player_,
&QMediaPlayer::mediaStatusChanged,
this,
[this](QMediaPlayer::MediaStatus status) {
if (status == QMediaPlayer::LoadedMedia)
player_.play();
});
}
void
CallManager::sendInvite(const QString &roomid)
{
if (onActiveCall())
return;
auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
if (roomInfo.member_count != 2) {
emit ChatPage::instance()->showNotification(
"Voice calls are limited to 1:1 rooms.");
return;
}
std::string errorMessage;
if (!session_.init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
return;
}
roomid_ = roomid;
session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
session_.setTurnServers(turnURIs_);
generateCallID();
nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
const RoomMember &callee =
members.front().user_id == utils::localUser() ? members.back() : members.front();
emit newCallParty(callee.user_id,
callee.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url));
playRingtone("qrc:/media/media/ringback.ogg", true);
if (!session_.createOffer()) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
endCall();
}
}
namespace {
std::string
callHangUpReasonString(CallHangUp::Reason reason)
{
switch (reason) {
case CallHangUp::Reason::ICEFailed:
return "ICE failed";
case CallHangUp::Reason::InviteTimeOut:
return "Invite time out";
default:
return "User";
}
}
}
void
CallManager::hangUp(CallHangUp::Reason reason)
{
if (!callid_.empty()) {
nhlog::ui()->debug(
"WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason));
emit newMessage(roomid_, CallHangUp{callid_, 0, reason});
endCall();
}
}
bool
CallManager::onActiveCall()
{
return session_.state() != WebRTCSession::State::DISCONNECTED;
}
void
CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
#ifdef GSTREAMER_AVAILABLE
if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
return;
#else
(void)event;
#endif
}
template<typename T>
bool
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event)
{
if (std::holds_alternative<RoomEvent<T>>(event)) {
handleEvent(std::get<RoomEvent<T>>(event));
return true;
}
return false;
}
void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
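// treat the invite as a video call if its SDP advertises an m=video media line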
const char video[] = "m=video";
const std::string &sdp = callInviteEvent.content.sdp;
bool isVideo = std::search(sdp.cbegin(),
sdp.cend(),
std::cbegin(video),
std::cend(video) - 1,
[](unsigned char c1, unsigned char c2) {
return std::tolower(c1) == std::tolower(c2);
}) != sdp.cend();
nhlog::ui()->debug("WebRTC: call id: {} - incoming {} CallInvite from {}",
callInviteEvent.content.call_id,
(isVideo ? "video" : "voice"),
callInviteEvent.sender);
if (callInviteEvent.content.call_id.empty())
return;
auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
emit newMessage(QString::fromStdString(callInviteEvent.room_id),
CallHangUp{callInviteEvent.content.call_id,
0,
CallHangUp::Reason::InviteTimeOut});
return;
}
playRingtone("qrc:/media/media/ring.ogg", true);
roomid_ = QString::fromStdString(callInviteEvent.room_id);
callid_ = callInviteEvent.content.call_id;
remoteICECandidates_.clear();
std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
const RoomMember &caller =
members.front().user_id == utils::localUser() ? members.back() : members.front();
emit newCallParty(caller.user_id,
caller.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url));
auto dialog = new dialogs::AcceptCall(caller.user_id,
caller.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url),
settings_,
MainWindow::instance());
connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
MainWindow::instance()->hideOverlay();
answerInvite(callInviteEvent.content);
});
connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
MainWindow::instance()->hideOverlay();
hangUp();
});
MainWindow::instance()->showSolidOverlayModal(dialog);
}
void
CallManager::answerInvite(const CallInvite &invite)
{
stopRingtone();
std::string errorMessage;
if (!session_.init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
hangUp();
return;
}
session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
session_.setTurnServers(turnURIs_);
if (!session_.acceptOffer(invite.sdp)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
hangUp();
return;
}
session_.acceptICECandidates(remoteICECandidates_);
remoteICECandidates_.clear();
}
void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
if (callCandidatesEvent.sender == utils::localUser().toStdString())
return;
nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
callCandidatesEvent.content.call_id,
callCandidatesEvent.sender);
if (callid_ == callCandidatesEvent.content.call_id) {
if (onActiveCall())
session_.acceptICECandidates(callCandidatesEvent.content.candidates);
else {
// CallInvite has been received and we're awaiting localUser to accept or
// reject the call
for (const auto &c : callCandidatesEvent.content.candidates)
remoteICECandidates_.push_back(c);
}
}
}
void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
callAnswerEvent.content.call_id,
callAnswerEvent.sender);
if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
callid_ == callAnswerEvent.content.call_id) {
emit ChatPage::instance()->showNotification("Call answered on another device.");
stopRingtone();
MainWindow::instance()->hideOverlay();
return;
}
if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
stopRingtone();
if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
hangUp();
}
}
}
void
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
{
nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}",
callHangUpEvent.content.call_id,
callHangUpReasonString(callHangUpEvent.content.reason),
callHangUpEvent.sender);
if (callid_ == callHangUpEvent.content.call_id) {
MainWindow::instance()->hideOverlay();
endCall();
}
}
void
CallManager::generateCallID()
{
using namespace std::chrono;
uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
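// a millisecond timestamp is unique enough to correlate this client's call events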
callid_ = "c" + std::to_string(ms);
}
void
CallManager::clear()
{
roomid_.clear();
callid_.clear();
remoteICECandidates_.clear();
}
void
CallManager::endCall()
{
stopRingtone();
clear();
session_.end();
}
void
CallManager::refreshTurnServer()
{
turnURIs_.clear();
turnServerTimer_.start(2000);
}
void
CallManager::retrieveTurnServer()
{
http::client()->get_turn_server(
[this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
if (err) {
turnServerTimer_.setInterval(5000);
return;
}
emit turnServerRetrieved(res);
});
}
void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
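// note the doubled "media" in qrc:/media/media/...: the qresource prefix is /media
// and the files live under media/ (see resources/res.qrc)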
static QMediaPlaylist playlist;
playlist.clear();
playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
: QMediaPlaylist::CurrentItemOnce);
playlist.addMedia(QUrl(ringtone));
player_.setVolume(100);
player_.setPlaylist(&playlist);
}
void
CallManager::stopRingtone()
{
player_.setPlaylist(nullptr);
}
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)
{
// gstreamer expects: turn(s)://username:password@host:port?transport=udp(tcp)
// where username and password are percent-encoded
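// e.g. (hypothetical values) uri "turn:turn.example.org:3478?transport=udp" with
// username "user@example.org" and password "secret" yields
// turn://user%40example.org:secret@turn.example.org:3478?transport=udp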
std::vector<std::string> ret;
for (const auto &uri : turnServer.uris) {
if (auto c = uri.find(':'); c == std::string::npos) {
nhlog::ui()->error("Invalid TURN server uri: {}", uri);
continue;
} else {
std::string scheme = std::string(uri, 0, c);
if (scheme != "turn" && scheme != "turns") {
nhlog::ui()->error("Invalid TURN server uri: {}", uri);
continue;
}
QString encodedUri =
QString::fromStdString(scheme) + "://" +
QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) +
":" +
QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) +
"@" + QString::fromStdString(std::string(uri, ++c));
ret.push_back(encodedUri.toStdString());
}
}
return ret;
}
}

@@ -0,0 +1,75 @@
#pragma once
#include <string>
#include <vector>
#include <QMediaPlayer>
#include <QObject>
#include <QSharedPointer>
#include <QString>
#include <QTimer>
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
namespace mtx::responses {
struct TurnServer;
}
class UserSettings;
class WebRTCSession;
class CallManager : public QObject
{
Q_OBJECT
public:
CallManager(QSharedPointer<UserSettings>);
void sendInvite(const QString &roomid);
void hangUp(
mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
bool onActiveCall();
void refreshTurnServer();
public slots:
void syncEvent(const mtx::events::collections::TimelineEvents &event);
signals:
void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void turnServerRetrieved(const mtx::responses::TurnServer &);
void newCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl);
private slots:
void retrieveTurnServer();
private:
WebRTCSession &session_;
QString roomid_;
std::string callid_;
const uint32_t timeoutms_ = 120000;
std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
std::vector<std::string> turnURIs_;
QTimer turnServerTimer_;
QSharedPointer<UserSettings> settings_;
QMediaPlayer player_;
template<typename T>
bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
void answerInvite(const mtx::events::msg::CallInvite &);
void generateCallID();
void clear();
void endCall();
void playRingtone(const QString &ringtone, bool repeat);
void stopRingtone();
};

@@ -22,6 +22,7 @@
#include <QShortcut>
#include <QtConcurrent>
#include "ActiveCallBar.h"
#include "AvatarProvider.h"
#include "Cache.h"
#include "Cache_p.h"
@@ -40,11 +41,13 @@
#include "UserInfoWidget.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/OverlayModal.h"
#include "ui/Theme.h"
#include "notifications/Manager.h"
#include "dialogs/PlaceCall.h"
#include "dialogs/ReadReceipts.h"
#include "popups/UserMentions.h"
#include "timeline/TimelineViewManager.h"
@@ -68,6 +71,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
, isConnected_(true)
, userSettings_{userSettings}
, notificationsManager(this)
, callManager_(userSettings)
{
setObjectName("chatPage");
@@ -123,11 +127,17 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
contentLayout_->setMargin(0);
top_bar_ = new TopRoomBar(this);
-view_manager_ = new TimelineViewManager(userSettings_, this);
+view_manager_ = new TimelineViewManager(userSettings_, &callManager_, this);
contentLayout_->addWidget(top_bar_);
contentLayout_->addWidget(view_manager_->getWidget());
activeCallBar_ = new ActiveCallBar(this);
contentLayout_->addWidget(activeCallBar_);
activeCallBar_->hide();
connect(
&callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);
// Splitter
splitter->addWidget(sideBar_);
splitter->addWidget(content_);
@@ -446,6 +456,35 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
roomid, filename, encryptedFile, url, mime, dsize);
});
connect(text_input_, &TextInputWidget::callButtonPress, this, [this]() {
if (callManager_.onActiveCall()) {
callManager_.hangUp();
} else {
if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
roomInfo.member_count != 2) {
showNotification("Voice calls are limited to 1:1 rooms.");
} else {
std::vector<RoomMember> members(
cache::getMembers(current_room_.toStdString()));
const RoomMember &callee =
members.front().user_id == utils::localUser() ? members.back()
: members.front();
auto dialog = new dialogs::PlaceCall(
callee.user_id,
callee.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url),
userSettings_,
MainWindow::instance());
connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
callManager_.sendInvite(current_room_);
});
utils::centerWidget(dialog, MainWindow::instance());
dialog->show();
}
}
});
connect(room_list_, &RoomList::roomAvatarChanged, this, &ChatPage::updateTopBarAvatar);
connect(
@@ -577,6 +616,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
connect(this, &ChatPage::dropToLoginPageCb, this, &ChatPage::dropToLoginPage);
connectCallMessage<mtx::events::msg::CallInvite>();
connectCallMessage<mtx::events::msg::CallCandidates>();
connectCallMessage<mtx::events::msg::CallAnswer>();
connectCallMessage<mtx::events::msg::CallHangUp>();
instance_ = this;
}
@@ -679,6 +723,8 @@ ChatPage::bootstrap(QString userid, QString homeserver, QString token)
const bool isInitialized = cache::isInitialized();
const auto cacheVersion = cache::formatVersion();
callManager_.refreshTurnServer();
if (!isInitialized) {
cache::setCurrentFormat();
} else {
@@ -1470,3 +1516,13 @@ ChatPage::initiateLogout()
emit showOverlayProgressBar();
}
template<typename T>
void
ChatPage::connectCallMessage()
{
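// qOverload selects the matching newMessage/queueCallMessage overload pair
// for each of the four call event types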
connect(&callManager_,
qOverload<const QString &, const T &>(&CallManager::newMessage),
view_manager_,
qOverload<const QString &, const T &>(&TimelineViewManager::queueCallMessage));
}

@@ -35,11 +35,13 @@
#include <QWidget>
#include "CacheStructs.h"
#include "CallManager.h"
#include "CommunitiesList.h"
#include "Utils.h"
#include "notifications/Manager.h"
#include "popups/UserMentions.h"
class ActiveCallBar;
class OverlayModal;
class QuickSwitcher;
class RoomList;
@@ -50,7 +52,6 @@ class TimelineViewManager;
class TopRoomBar;
class UserInfoWidget;
class UserSettings;
-class NotificationsManager;
constexpr int CONSENSUS_TIMEOUT = 1000;
constexpr int SHOW_CONTENT_TIMEOUT = 3000;
@@ -216,6 +217,9 @@ private:
void showNotificationsDialog(const QPoint &point);
template<typename T>
void connectCallMessage();
QHBoxLayout *topLayout_;
Splitter *splitter;
@@ -235,6 +239,7 @@ private:
TopRoomBar *top_bar_;
TextInputWidget *text_input_;
ActiveCallBar *activeCallBar_;
QTimer connectivityTimer_;
std::atomic_bool isConnected_;
@@ -252,6 +257,7 @@ private:
QSharedPointer<UserSettings> userSettings_;
NotificationsManager notificationsManager;
CallManager callManager_;
};
template<class Collection>

@@ -1,5 +1,7 @@
#include "EventAccessors.h"
#include <algorithm>
#include <cctype>
#include <type_traits>
namespace {
@@ -65,6 +67,29 @@ struct EventRoomTopic
}
};
struct CallType
{
template<class T>
std::string operator()(const T &e)
{
if constexpr (std::is_same_v<mtx::events::RoomEvent<mtx::events::msg::CallInvite>,
T>) {
const char video[] = "m=video";
const std::string &sdp = e.content.sdp;
return std::search(sdp.cbegin(),
sdp.cend(),
std::cbegin(video),
std::cend(video) - 1,
[](unsigned char c1, unsigned char c2) {
return std::tolower(c1) == std::tolower(c2);
}) != sdp.cend()
? "video"
: "voice";
}
return std::string();
}
};
struct EventBody
{
template<class C>
@@ -325,6 +350,12 @@ mtx::accessors::room_topic(const mtx::events::collections::TimelineEvents &event
return std::visit(EventRoomTopic{}, event);
}
std::string
mtx::accessors::call_type(const mtx::events::collections::TimelineEvents &event)
{
return std::visit(CallType{}, event);
}
std::string
mtx::accessors::body(const mtx::events::collections::TimelineEvents &event)
{

@@ -30,6 +30,9 @@ room_name(const mtx::events::collections::TimelineEvents &event);
std::string
room_topic(const mtx::events::collections::TimelineEvents &event);
std::string
call_type(const mtx::events::collections::TimelineEvents &event);
std::string
body(const mtx::events::collections::TimelineEvents &event);

@@ -17,6 +17,7 @@
#include <QApplication>
#include <QLayout>
#include <QMessageBox>
#include <QPluginLoader>
#include <QSettings>
#include <QShortcut>
@@ -35,6 +36,7 @@
#include "TrayIcon.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "WelcomePage.h"
#include "ui/LoadingIndicator.h"
#include "ui/OverlayModal.h"
@@ -285,6 +287,14 @@ MainWindow::showChatPage()
void
MainWindow::closeEvent(QCloseEvent *event)
{
if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
if (QMessageBox::question(this, "nheko", "A call is in progress. Quit?") !=
QMessageBox::Yes) {
event->ignore();
return;
}
}
if (!qApp->isSavingSession() && isVisible() && pageSupportsTray() &&
userSettings_->tray()) {
event->ignore();
@@ -433,8 +443,17 @@ void
MainWindow::openLogoutDialog()
{
auto dialog = new dialogs::Logout(this);
-connect(
-  dialog, &dialogs::Logout::loggingOut, this, [this]() { chat_page_->initiateLogout(); });
+connect(dialog, &dialogs::Logout::loggingOut, this, [this]() {
+        if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
+                if (QMessageBox::question(
+                      this, "nheko", "A call is in progress. Log out?") !=
+                    QMessageBox::Yes) {
+                        return;
+                }
+                WebRTCSession::instance().end();
+        }
+        chat_page_->initiateLogout();
+});
showDialog(dialog);
}

@@ -453,6 +453,15 @@ TextInputWidget::TextInputWidget(QWidget *parent)
topLayout_->setSpacing(0);
topLayout_->setContentsMargins(13, 1, 13, 0);
#ifdef GSTREAMER_AVAILABLE
callBtn_ = new FlatButton(this);
changeCallButtonState(WebRTCSession::State::DISCONNECTED);
connect(&WebRTCSession::instance(),
&WebRTCSession::stateChanged,
this,
&TextInputWidget::changeCallButtonState);
#endif
QIcon send_file_icon;
send_file_icon.addFile(":/icons/icons/ui/paper-clip-outline.png");
@@ -521,6 +530,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
emojiBtn_->setIcon(emoji_icon);
emojiBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight));
#ifdef GSTREAMER_AVAILABLE
topLayout_->addWidget(callBtn_);
#endif
topLayout_->addWidget(sendFileBtn_);
topLayout_->addWidget(input_);
topLayout_->addWidget(emojiBtn_);
@@ -528,6 +540,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
setLayout(topLayout_);
#ifdef GSTREAMER_AVAILABLE
connect(callBtn_, &FlatButton::clicked, this, &TextInputWidget::callButtonPress);
#endif
connect(sendMessageBtn_, &FlatButton::clicked, input_, &FilteredTextEdit::submit);
connect(sendFileBtn_, SIGNAL(clicked()), this, SLOT(openFileSelection()));
connect(input_, &FilteredTextEdit::message, this, &TextInputWidget::sendTextMessage);
@@ -652,3 +667,19 @@ TextInputWidget::paintEvent(QPaintEvent *)
style()->drawPrimitive(QStyle::PE_Widget, &opt, &p, this);
}
void
TextInputWidget::changeCallButtonState(WebRTCSession::State state)
{
QIcon icon;
if (state == WebRTCSession::State::ICEFAILED ||
state == WebRTCSession::State::DISCONNECTED) {
callBtn_->setToolTip(tr("Place a call"));
icon.addFile(":/icons/icons/ui/place-call.png");
} else {
callBtn_->setToolTip(tr("Hang up"));
icon.addFile(":/icons/icons/ui/end-call.png");
}
callBtn_->setIcon(icon);
callBtn_->setIconSize(QSize(ButtonHeight * 1.1, ButtonHeight * 1.1));
}

@@ -26,6 +26,7 @@
#include <QTextEdit>
#include <QWidget>
#include "WebRTCSession.h"
#include "dialogs/PreviewUploadOverlay.h"
#include "emoji/PickButton.h"
#include "popups/SuggestionsPopup.h"
@@ -149,6 +150,7 @@ public slots:
void openFileSelection();
void hideUploadSpinner();
void focusLineEdit() { input_->setFocus(); }
void changeCallButtonState(WebRTCSession::State);
private slots:
void addSelectedEmoji(const QString &emoji);
@@ -161,6 +163,7 @@ signals:
void uploadMedia(const QSharedPointer<QIODevice> data,
QString mimeClass,
const QString &filename);
void callButtonPress();
void sendJoinRoomRequest(const QString &room);
void sendInviteRoomRequest(const QString &userid, const QString &reason);
@@ -185,6 +188,7 @@ private:
LoadingIndicator *spinner_;
FlatButton *callBtn_;
FlatButton *sendFileBtn_;
FlatButton *sendMessageBtn_;
emoji::PickButton *emojiBtn_;

@@ -77,6 +77,8 @@ UserSettings::load()
presence_ =
settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
.value<Presence>();
useStunServer_ = settings.value("user/use_stun_server", false).toBool();
defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString();
applyTheme();
}
@@ -279,6 +281,26 @@ UserSettings::setTheme(QString theme)
emit themeChanged(theme);
}
void
UserSettings::setUseStunServer(bool useStunServer)
{
if (useStunServer == useStunServer_)
return;
useStunServer_ = useStunServer;
emit useStunServerChanged(useStunServer);
save();
}
void
UserSettings::setDefaultAudioSource(const QString &defaultAudioSource)
{
if (defaultAudioSource == defaultAudioSource_)
return;
defaultAudioSource_ = defaultAudioSource;
emit defaultAudioSourceChanged(defaultAudioSource);
save();
}
void
UserSettings::applyTheme()
{
@@ -364,6 +386,8 @@ UserSettings::save()
settings.setValue("font_family", font_);
settings.setValue("emoji_font_family", emojiFont_);
settings.setValue("presence", QVariant::fromValue(presence_));
settings.setValue("use_stun_server", useStunServer_);
settings.setValue("default_audio_source", defaultAudioSource_);
settings.endGroup();
@@ -429,6 +453,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
markdown_ = new Toggle{this};
desktopNotifications_ = new Toggle{this};
alertOnNotification_ = new Toggle{this};
useStunServer_ = new Toggle{this};
scaleFactorCombo_ = new QComboBox{this};
fontSizeCombo_ = new QComboBox{this};
fontSelectionCombo_ = new QComboBox{this};
@@ -482,6 +507,15 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
timelineMaxWidthSpin_->setMaximum(100'000'000);
timelineMaxWidthSpin_->setSingleStep(10);
auto callsLabel = new QLabel{tr("CALLS"), this};
callsLabel->setFixedHeight(callsLabel->minimumHeight() + LayoutTopMargin);
callsLabel->setAlignment(Qt::AlignBottom);
callsLabel->setFont(font);
useStunServer_ = new Toggle{this};
defaultAudioSourceValue_ = new QLabel(this);
defaultAudioSourceValue_->setFont(font);
auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this};
encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin);
encryptionLabel_->setAlignment(Qt::AlignBottom);
@@ -612,6 +646,14 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
#endif
boxWrap(tr("Theme"), themeCombo_);
formLayout_->addRow(callsLabel);
formLayout_->addRow(new HorizontalLine{this});
boxWrap(tr("Allow fallback call assist server"),
useStunServer_,
tr("Will use turn.matrix.org as assist when your home server does not offer one."));
boxWrap(tr("Default audio source device"), defaultAudioSourceValue_);
formLayout_->addRow(encryptionLabel_);
formLayout_->addRow(new HorizontalLine{this});
boxWrap(tr("Device ID"), deviceIdValue_);
@@ -724,6 +766,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
settings_->setEnlargeEmojiOnlyMessages(!disabled);
});
connect(useStunServer_, &Toggle::toggled, this, [this](bool disabled) {
settings_->setUseStunServer(!disabled);
});
connect(timelineMaxWidthSpin_,
qOverload<int>(&QSpinBox::valueChanged),
this,
@@ -766,6 +812,8 @@ UserSettingsPage::showEvent(QShowEvent *)
enlargeEmojiOnlyMessages_->setState(!settings_->enlargeEmojiOnlyMessages());
deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
useStunServer_->setState(!settings_->useStunServer());
defaultAudioSourceValue_->setText(settings_->defaultAudioSource());
deviceFingerprintValue_->setText(
utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519));

@@ -71,6 +71,10 @@ class UserSettings : public QObject
Q_PROPERTY(
QString emojiFont READ emojiFont WRITE setEmojiFontFamily NOTIFY emojiFontChanged)
Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
Q_PROPERTY(
bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)
Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource
NOTIFY defaultAudioSourceChanged)
public:
UserSettings();
@@ -107,6 +111,8 @@ public:
void setAvatarCircles(bool state);
void setDecryptSidebar(bool state);
void setPresence(Presence state);
void setUseStunServer(bool state);
void setDefaultAudioSource(const QString &deviceName);
QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
bool messageHoverHighlight() const { return messageHoverHighlight_; }
@@ -132,6 +138,8 @@ public:
QString font() const { return font_; }
QString emojiFont() const { return emojiFont_; }
Presence presence() const { return presence_; }
bool useStunServer() const { return useStunServer_; }
QString defaultAudioSource() const { return defaultAudioSource_; }
signals:
void groupViewStateChanged(bool state);
@@ -154,6 +162,8 @@ signals:
void fontChanged(QString state);
void emojiFontChanged(QString state);
void presenceChanged(Presence state);
void useStunServerChanged(bool state);
void defaultAudioSourceChanged(const QString &deviceName);
private:
// Default to system theme if QT_QPA_PLATFORMTHEME var is set.
@@ -181,6 +191,8 @@ private:
QString font_;
QString emojiFont_;
Presence presence_;
bool useStunServer_;
QString defaultAudioSource_;
};
class HorizontalLine : public QFrame
@@ -234,9 +246,11 @@ private:
Toggle *desktopNotifications_;
Toggle *alertOnNotification_;
Toggle *avatarCircles_;
Toggle *useStunServer_;
Toggle *decryptSidebar_;
QLabel *deviceFingerprintValue_;
QLabel *deviceIdValue_;
QLabel *defaultAudioSourceValue_;
QComboBox *themeCombo_;
QComboBox *scaleFactorCombo_;

@@ -35,11 +35,10 @@ createDescriptionInfo(const Event &event, const QString &localUser, const QStrin
const auto username = cache::displayName(room_id, sender);
const auto ts = QDateTime::fromMSecsSinceEpoch(msg.origin_server_ts);
-return DescInfo{
-  QString::fromStdString(msg.event_id),
+return DescInfo{QString::fromStdString(msg.event_id),
sender,
utils::messageDescription<T>(
-username, QString::fromStdString(msg.content.body).trimmed(), sender == localUser),
+username, utils::event_body(event).trimmed(), sender == localUser),
utils::descriptiveTime(ts),
msg.origin_server_ts,
ts};
@@ -163,6 +162,9 @@ utils::getMessageDescription(const TimelineEvent &event,
using Notice = mtx::events::RoomEvent<mtx::events::msg::Notice>;
using Text = mtx::events::RoomEvent<mtx::events::msg::Text>;
using Video = mtx::events::RoomEvent<mtx::events::msg::Video>;
using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
if (std::holds_alternative<Audio>(event)) {
@@ -179,6 +181,12 @@ utils::getMessageDescription(const TimelineEvent &event,
return createDescriptionInfo<Text>(event, localUser, room_id);
} else if (std::holds_alternative<Video>(event)) {
return createDescriptionInfo<Video>(event, localUser, room_id);
} else if (std::holds_alternative<CallInvite>(event)) {
return createDescriptionInfo<CallInvite>(event, localUser, room_id);
} else if (std::holds_alternative<CallAnswer>(event)) {
return createDescriptionInfo<CallAnswer>(event, localUser, room_id);
} else if (std::holds_alternative<CallHangUp>(event)) {
return createDescriptionInfo<CallHangUp>(event, localUser, room_id);
} else if (std::holds_alternative<mtx::events::Sticker>(event)) {
return createDescriptionInfo<mtx::events::Sticker>(event, localUser, room_id);
} else if (auto msg = std::get_if<Encrypted>(&event); msg != nullptr) {

@@ -96,6 +96,9 @@ messageDescription(const QString &username = "",
using Sticker = mtx::events::Sticker;
using Text = mtx::events::RoomEvent<mtx::events::msg::Text>;
using Video = mtx::events::RoomEvent<mtx::events::msg::Video>;
using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
if (std::is_same<T, Audio>::value) {
@@ -164,6 +167,30 @@ messageDescription(const QString &username = "",
return QCoreApplication::translate("message-description sent:",
"%1 sent an encrypted message")
.arg(username);
} else if (std::is_same<T, CallInvite>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You placed a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 placed a call")
.arg(username);
} else if (std::is_same<T, CallAnswer>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You answered a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 answered a call")
.arg(username);
} else if (std::is_same<T, CallHangUp>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You ended a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 ended a call")
.arg(username);
} else {
return QCoreApplication::translate("utils", "Unknown Message Type");
}

@@ -0,0 +1,697 @@
#include <cctype>
#include "Logging.h"
#include "WebRTCSession.h"
#ifdef GSTREAMER_AVAILABLE
extern "C"
{
#include "gst/gst.h"
#include "gst/sdp/sdp.h"
#define GST_USE_UNSTABLE_API
#include "gst/webrtc/webrtc.h"
}
#endif
Q_DECLARE_METATYPE(WebRTCSession::State)
WebRTCSession::WebRTCSession()
: QObject()
{
qRegisterMetaType<WebRTCSession::State>();
connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
}
bool
WebRTCSession::init(std::string *errorMessage)
{
#ifdef GSTREAMER_AVAILABLE
if (initialised_)
return true;
GError *error = nullptr;
if (!gst_init_check(nullptr, nullptr, &error)) {
std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
if (error) {
strError += error->message;
g_error_free(error);
}
nhlog::ui()->error(strError);
if (errorMessage)
*errorMessage = strError;
return false;
}
gchar *version = gst_version_string();
std::string gstVersion(version);
g_free(version);
nhlog::ui()->info("WebRTC: initialised " + gstVersion);
// GStreamer Plugins:
// Base: audioconvert, audioresample, opus, playback, volume
// Good: autodetect, rtpmanager
// Bad: dtls, srtp, webrtc
// libnice [GLib]: nice
initialised_ = true;
std::string strError = gstVersion + ": Missing plugins: ";
const gchar *needed[] = {"audioconvert",
"audioresample",
"autodetect",
"dtls",
"nice",
"opus",
"playback",
"rtpmanager",
"srtp",
"volume",
"webrtc",
nullptr};
GstRegistry *registry = gst_registry_get();
for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
if (!plugin) {
strError += std::string(needed[i]) + " ";
initialised_ = false;
continue;
}
gst_object_unref(plugin);
}
if (!initialised_) {
nhlog::ui()->error(strError);
if (errorMessage)
*errorMessage = strError;
}
return initialised_;
#else
(void)errorMessage;
return false;
#endif
}
#ifdef GSTREAMER_AVAILABLE
namespace {
bool isoffering_;
std::string localsdp_;
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
nhlog::ui()->error("WebRTC: end of stream");
session->end();
break;
case GST_MESSAGE_ERROR:
GError *error;
gchar *debug;
gst_message_parse_error(msg, &error, &debug);
nhlog::ui()->error(
"WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
g_clear_error(&error);
g_free(debug);
session->end();
break;
default:
break;
}
return TRUE;
}
GstWebRTCSessionDescription *
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
GstSDPMessage *msg;
gst_sdp_message_new(&msg);
if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
return gst_webrtc_session_description_new(type, msg);
} else {
nhlog::ui()->error("WebRTC: failed to parse remote session description");
gst_sdp_message_free(msg); // GstSDPMessage is not a GstObject; free it directly
return nullptr;
}
}
void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
const GstStructure *reply = gst_promise_get_reply(promise);
gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
GstWebRTCSessionDescription *gstsdp = nullptr;
gst_structure_get(reply,
isAnswer ? "answer" : "offer",
GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
&gstsdp,
nullptr);
gst_promise_unref(promise);
g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);
gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
localsdp_ = std::string(sdp);
g_free(sdp);
gst_webrtc_session_description_free(gstsdp);
nhlog::ui()->debug(
"WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
}
void
createOffer(GstElement *webrtc)
{
// create-offer first, then set-local-description
GstPromise *promise =
gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}
void
createAnswer(GstPromise *promise, gpointer webrtc)
{
// create-answer first, then set-local-description
gst_promise_unref(promise);
promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}
#if GST_CHECK_VERSION(1, 17, 0)
void
iceGatheringStateChanged(GstElement *webrtc,
GParamSpec *pspec G_GNUC_UNUSED,
gpointer user_data G_GNUC_UNUSED)
{
GstWebRTCICEGatheringState newState;
g_object_get(webrtc, "ice-gathering-state", &newState, nullptr);
if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete");
if (isoffering_) {
emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(
WebRTCSession::State::OFFERSENT);
} else {
emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(
WebRTCSession::State::ANSWERSENT);
}
}
}
#else
gboolean
onICEGatheringCompletion(gpointer timerid)
{
*(guint *)(timerid) = 0;
if (isoffering_) {
emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
} else {
emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
}
return FALSE;
}
#endif
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
guint mlineIndex,
gchar *candidate,
gpointer G_GNUC_UNUSED)
{
nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);
if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
emit WebRTCSession::instance().newICECandidate(
{"audio", (uint16_t)mlineIndex, candidate});
return;
}
localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
// GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
// GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.17.
// Use a 100ms timeout in the meantime
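// (each new candidate restarts the timer below, so gathering counts as
// complete once no candidate has arrived for 100ms)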
#if !GST_CHECK_VERSION(1, 17, 0)
static guint timerid = 0;
if (timerid)
g_source_remove(timerid);
timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
#endif
}
void
iceConnectionStateChanged(GstElement *webrtc,
GParamSpec *pspec G_GNUC_UNUSED,
gpointer user_data G_GNUC_UNUSED)
{
GstWebRTCICEConnectionState newState;
g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
switch (newState) {
case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
break;
case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
break;
default:
break;
}
}
void
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
GstCaps *caps = gst_pad_get_current_caps(newpad);
if (!caps)
return;
const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
gst_caps_unref(caps);
GstPad *queuepad = nullptr;
if (g_str_has_prefix(name, "audio")) {
nhlog::ui()->debug("WebRTC: received incoming audio stream");
GstElement *queue = gst_element_factory_make("queue", nullptr);
GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
GstElement *resample = gst_element_factory_make("audioresample", nullptr);
GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
gst_element_sync_state_with_parent(queue);
gst_element_sync_state_with_parent(convert);
gst_element_sync_state_with_parent(resample);
gst_element_sync_state_with_parent(sink);
gst_element_link_many(queue, convert, resample, sink, nullptr);
queuepad = gst_element_get_static_pad(queue, "sink");
}
if (queuepad) {
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
nhlog::ui()->error("WebRTC: unable to link new pad");
else {
emit WebRTCSession::instance().stateChanged(
WebRTCSession::State::CONNECTED);
}
gst_object_unref(queuepad);
}
}
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
return;
nhlog::ui()->debug("WebRTC: received incoming stream");
GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
gst_bin_add(GST_BIN(pipe), decodebin);
gst_element_sync_state_with_parent(decodebin);
GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
nhlog::ui()->error("WebRTC: unable to link new pad");
gst_object_unref(sinkpad);
}
std::string::const_iterator
findName(const std::string &sdp, const std::string &name)
{
return std::search(
sdp.cbegin(),
sdp.cend(),
name.cbegin(),
name.cend(),
[](unsigned char c1, unsigned char c2) { return std::tolower(c1) == std::tolower(c2); });
}
int
getPayloadType(const std::string &sdp, const std::string &name)
{
// eg a=rtpmap:111 opus/48000/2
auto e = findName(sdp, name);
if (e == sdp.cend()) {
nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
return -1;
}
if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
" payload type");
return -1;
} else {
++s;
try {
return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
} catch (...) {
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
" payload type");
}
}
return -1;
}
}
bool
WebRTCSession::createOffer()
{
isoffering_ = true;
localsdp_.clear();
localcandidates_.clear();
return startPipeline(111); // a dynamic opus payload type
}
bool
WebRTCSession::acceptOffer(const std::string &sdp)
{
nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
if (state_ != State::DISCONNECTED)
return false;
isoffering_ = false;
localsdp_.clear();
localcandidates_.clear();
int opusPayloadType = getPayloadType(sdp, "opus");
if (opusPayloadType == -1)
return false;
GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
if (!offer)
return false;
if (!startPipeline(opusPayloadType)) {
gst_webrtc_session_description_free(offer);
return false;
}
// set-remote-description first, then create-answer
GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
gst_webrtc_session_description_free(offer);
return true;
}
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
if (state_ != State::OFFERSENT)
return false;
GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
if (!answer) {
end();
return false;
}
g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
gst_webrtc_session_description_free(answer);
return true;
}
void
WebRTCSession::acceptICECandidates(
const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
{
if (state_ >= State::INITIATED) {
for (const auto &c : candidates) {
nhlog::ui()->debug(
"WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
g_signal_emit_by_name(
webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
}
}
}
bool
WebRTCSession::startPipeline(int opusPayloadType)
{
if (state_ != State::DISCONNECTED)
return false;
emit stateChanged(State::INITIATING);
if (!createPipeline(opusPayloadType))
return false;
webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
if (!stunServer_.empty()) {
nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
}
for (const auto &uri : turnServers_) {
nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
gboolean udata;
g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
}
if (turnServers_.empty())
nhlog::ui()->warn("WebRTC: no TURN server provided");
// generate the offer when the pipeline goes to PLAYING
if (isoffering_)
g_signal_connect(
webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);
// on-ice-candidate is emitted when a local ICE candidate has been gathered
g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);
// capture ICE failure
g_signal_connect(
webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);
// incoming streams trigger pad-added
gst_element_set_state(pipe_, GST_STATE_READY);
g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);
#if GST_CHECK_VERSION(1, 17, 0)
// capture ICE gathering completion
g_signal_connect(
webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
#endif
// webrtcbin lifetime is the same as that of the pipeline
gst_object_unref(webrtc_);
// start the pipeline
GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
nhlog::ui()->error("WebRTC: unable to start pipeline");
end();
return false;
}
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
gst_bus_add_watch(bus, newBusMessage, this);
gst_object_unref(bus);
emit stateChanged(State::INITIATED);
return true;
}
bool
WebRTCSession::createPipeline(int opusPayloadType)
{
int nSources = audioSources_ ? g_list_length(audioSources_) : 0;
if (nSources == 0) {
nhlog::ui()->error("WebRTC: no audio sources");
return false;
}
if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) {
nhlog::ui()->error("WebRTC: invalid audio source index");
return false;
}
GstElement *source = gst_device_create_element(
GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr);
GstElement *volume = gst_element_factory_make("volume", "srclevel");
GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
GstElement *resample = gst_element_factory_make("audioresample", nullptr);
GstElement *queue1 = gst_element_factory_make("queue", nullptr);
GstElement *opusenc = gst_element_factory_make("opusenc", nullptr);
GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr);
GstElement *queue2 = gst_element_factory_make("queue", nullptr);
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
"media",
G_TYPE_STRING,
"audio",
"encoding-name",
G_TYPE_STRING,
"OPUS",
"payload",
G_TYPE_INT,
opusPayloadType,
nullptr);
g_object_set(capsfilter, "caps", rtpcaps, nullptr);
gst_caps_unref(rtpcaps);
GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);
pipe_ = gst_pipeline_new(nullptr);
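// send chain: device source -> volume ("srclevel", provides the mute control)
// -> audioconvert -> audioresample -> queue -> opusenc -> rtpopuspay -> queue
// -> capsfilter (forcing the negotiated Opus payload type) -> webrtcbin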
gst_bin_add_many(GST_BIN(pipe_),
source,
volume,
convert,
resample,
queue1,
opusenc,
rtp,
queue2,
capsfilter,
webrtcbin,
nullptr);
if (!gst_element_link_many(source,
volume,
convert,
resample,
queue1,
opusenc,
rtp,
queue2,
capsfilter,
webrtcbin,
nullptr)) {
nhlog::ui()->error("WebRTC: failed to link pipeline elements");
end();
return false;
}
return true;
}
bool
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
{
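// toggles the "mute" property of the volume element named "srclevel" in createPipeline()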
if (state_ < State::INITIATED)
return false;
GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
if (!srclevel)
return false;
gboolean muted;
g_object_get(srclevel, "mute", &muted, nullptr);
g_object_set(srclevel, "mute", !muted, nullptr);
gst_object_unref(srclevel);
isMuted = !muted;
return true;
}
void
WebRTCSession::end()
{
nhlog::ui()->debug("WebRTC: ending session");
if (pipe_) {
gst_element_set_state(pipe_, GST_STATE_NULL);
gst_object_unref(pipe_);
pipe_ = nullptr;
}
webrtc_ = nullptr;
if (state_ != State::DISCONNECTED)
emit stateChanged(State::DISCONNECTED);
}
void
WebRTCSession::refreshDevices()
{
if (!initialised_)
return;
static GstDeviceMonitor *monitor = nullptr;
if (!monitor) {
monitor = gst_device_monitor_new();
GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
gst_caps_unref(caps);
}
g_list_free_full(audioSources_, g_object_unref);
audioSources_ = gst_device_monitor_get_devices(monitor);
}
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
{
if (!initialised_)
return {};
refreshDevices();
std::vector<std::string> ret;
ret.reserve(g_list_length(audioSources_));
for (GList *l = audioSources_; l != nullptr; l = l->next) {
gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
ret.emplace_back(name);
g_free(name);
if (ret.back() == defaultDevice) {
// move default device to top of the list
std::swap(audioSources_->data, l->data);
std::swap(ret.front(), ret.back());
}
}
return ret;
}
#else
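// stubs compiled when GStreamer is not available; every call-related operation is a no-op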
bool
WebRTCSession::createOffer()
{
return false;
}
bool
WebRTCSession::acceptOffer(const std::string &)
{
return false;
}
bool
WebRTCSession::acceptAnswer(const std::string &)
{
return false;
}
void
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &)
{}
bool
WebRTCSession::startPipeline(int)
{
return false;
}
bool
WebRTCSession::createPipeline(int)
{
return false;
}
bool
WebRTCSession::toggleMuteAudioSrc(bool &)
{
return false;
}
void
WebRTCSession::end()
{}
void
WebRTCSession::refreshDevices()
{}
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &)
{
return {};
}
#endif

@ -0,0 +1,83 @@
#pragma once
#include <string>
#include <vector>
#include <QObject>
#include "mtx/events/voip.hpp"
typedef struct _GList GList;
typedef struct _GstElement GstElement;
class WebRTCSession : public QObject
{
Q_OBJECT
public:
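// Rough lifecycle: DISCONNECTED -> INITIATING -> INITIATED -> OFFERSENT (caller)
// or ANSWERSENT (callee) -> CONNECTING -> CONNECTED; ICEFAILED on a failed ICE
// connection.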
enum class State
{
DISCONNECTED,
ICEFAILED,
INITIATING,
INITIATED,
OFFERSENT,
ANSWERSENT,
CONNECTING,
CONNECTED
};
static WebRTCSession &instance()
{
static WebRTCSession instance;
return instance;
}
bool init(std::string *errorMessage = nullptr);
State state() const { return state_; }
bool createOffer();
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
bool toggleMuteAudioSrc(bool &isMuted);
void end();
void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }
signals:
void offerCreated(const std::string &sdp,
const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
void answerCreated(const std::string &sdp,
const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt
private slots:
void setState(State state) { state_ = state; }
private:
WebRTCSession();
bool initialised_ = false;
State state_ = State::DISCONNECTED;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
std::string stunServer_;
std::vector<std::string> turnServers_;
GList *audioSources_ = nullptr;
int audioSourceIndex_ = -1;
bool startPipeline(int opusPayloadType);
bool createPipeline(int opusPayloadType);
void refreshDevices();
public:
WebRTCSession(WebRTCSession const &) = delete;
void operator=(WebRTCSession const &) = delete;
};
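// Illustrative usage sketch, not part of this commit: the CallManager slot names
// below are assumptions, while the WebRTCSession signals and methods are the ones
// declared above. A caller placing a voice call would wire things up roughly so:
//
//   auto &session = WebRTCSession::instance();
//   session.setStunServer(stunUri);   // stunUri/turnUris: hypothetical config values
//   session.setTurnServers(turnUris);
//   QObject::connect(&session, &WebRTCSession::offerCreated,
//                    callManager, &CallManager::sendInvite);     // hypothetical slot
//   QObject::connect(&session, &WebRTCSession::newICECandidate,
//                    callManager, &CallManager::sendCandidate);  // hypothetical slot
//   session.createOffer();  // emits offerCreated() with the local SDP and candidates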

@ -0,0 +1,135 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "ui/Avatar.h"
namespace dialogs {
AcceptCall::AcceptCall(const QString &caller,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent)
: QWidget(parent)
{
std::string errorMessage;
if (!WebRTCSession::instance().init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
settings->defaultAudioSource().toStdString());
if (audioDevices_.empty()) {
emit ChatPage::instance()->showNotification(
"Incoming call: No audio sources found.");
emit close();
return;
}
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal);
setAttribute(Qt::WA_DeleteOnClose, true);
setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH);
setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum);
auto layout = new QVBoxLayout(this);
layout->setSpacing(conf::modals::WIDGET_SPACING);
layout->setMargin(conf::modals::WIDGET_MARGIN);
QFont f;
f.setPointSizeF(f.pointSizeF());
QFont labelFont;
labelFont.setWeight(QFont::Medium);
QLabel *displayNameLabel = nullptr;
if (!displayName.isEmpty() && displayName != caller) {
displayNameLabel = new QLabel(displayName, this);
labelFont.setPointSizeF(f.pointSizeF() * 2);
displayNameLabel->setFont(labelFont);
displayNameLabel->setAlignment(Qt::AlignCenter);
}
QLabel *callerLabel = new QLabel(caller, this);
labelFont.setPointSizeF(f.pointSizeF() * 1.2);
callerLabel->setFont(labelFont);
callerLabel->setAlignment(Qt::AlignCenter);
auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
if (!avatarUrl.isEmpty())
avatar->setImage(avatarUrl);
else
avatar->setLetter(utils::firstChar(roomName));
const int iconSize = 22;
QLabel *callTypeIndicator = new QLabel(this);
callTypeIndicator->setPixmap(
QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));
QLabel *callTypeLabel = new QLabel("Voice Call", this);
labelFont.setPointSizeF(f.pointSizeF() * 1.1);
callTypeLabel->setFont(labelFont);
callTypeLabel->setAlignment(Qt::AlignCenter);
auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(18);
acceptBtn_ = new QPushButton(tr("Accept"), this);
acceptBtn_->setDefault(true);
acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
acceptBtn_->setIconSize(QSize(iconSize, iconSize));
rejectBtn_ = new QPushButton(tr("Reject"), this);
rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png"));
rejectBtn_->setIconSize(QSize(iconSize, iconSize));
buttonLayout->addWidget(acceptBtn_);
buttonLayout->addWidget(rejectBtn_);
auto deviceLayout = new QHBoxLayout;
auto audioLabel = new QLabel(this);
audioLabel->setPixmap(
QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));
auto deviceList = new QComboBox(this);
for (const auto &d : audioDevices_)
deviceList->addItem(QString::fromStdString(d));
deviceLayout->addStretch();
deviceLayout->addWidget(audioLabel);
deviceLayout->addWidget(deviceList);
if (displayNameLabel)
layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
layout->addWidget(callerLabel, 0, Qt::AlignCenter);
layout->addWidget(avatar, 0, Qt::AlignCenter);
layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
layout->addLayout(buttonLayout);
layout->addLayout(deviceLayout);
connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
settings->setDefaultAudioSource(
QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
emit accept();
emit close();
});
connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
emit reject();
emit close();
});
}
}

@ -0,0 +1,37 @@
#pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget>
class QPushButton;
class QString;
class UserSettings;
namespace dialogs {
class AcceptCall : public QWidget
{
Q_OBJECT
public:
AcceptCall(const QString &caller,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent = nullptr);
signals:
void accept();
void reject();
private:
QPushButton *acceptBtn_;
QPushButton *rejectBtn_;
std::vector<std::string> audioDevices_;
};
}

@ -0,0 +1,104 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/PlaceCall.h"
#include "ui/Avatar.h"
namespace dialogs {
PlaceCall::PlaceCall(const QString &callee,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent)
: QWidget(parent)
{
std::string errorMessage;
if (!WebRTCSession::instance().init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
settings->defaultAudioSource().toStdString());
if (audioDevices_.empty()) {
emit ChatPage::instance()->showNotification("No audio sources found.");
emit close();
return;
}
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal);
setAttribute(Qt::WA_DeleteOnClose, true);
auto layout = new QVBoxLayout(this);
layout->setSpacing(conf::modals::WIDGET_SPACING);
layout->setMargin(conf::modals::WIDGET_MARGIN);
auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(15);
buttonLayout->setMargin(0);
QFont f;
f.setPointSizeF(f.pointSizeF());
auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
if (!avatarUrl.isEmpty())
avatar->setImage(avatarUrl);
else
avatar->setLetter(utils::firstChar(roomName));
const int iconSize = 18;
voiceBtn_ = new QPushButton(tr("Voice"), this);
voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
voiceBtn_->setIconSize(QSize(iconSize, iconSize));
voiceBtn_->setDefault(true);
cancelBtn_ = new QPushButton(tr("Cancel"), this);
buttonLayout->addWidget(avatar);
buttonLayout->addStretch();
buttonLayout->addWidget(voiceBtn_);
buttonLayout->addWidget(cancelBtn_);
QString name = displayName.isEmpty() ? callee : displayName;
QLabel *label = new QLabel("Place a call to " + name + "?", this);
auto deviceLayout = new QHBoxLayout;
auto audioLabel = new QLabel(this);
audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
.pixmap(QSize(iconSize * 1.2, iconSize * 1.2)));
auto deviceList = new QComboBox(this);
for (const auto &d : audioDevices_)
deviceList->addItem(QString::fromStdString(d));
deviceLayout->addStretch();
deviceLayout->addWidget(audioLabel);
deviceLayout->addWidget(deviceList);
layout->addWidget(label);
layout->addLayout(buttonLayout);
layout->addLayout(deviceLayout);
connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
settings->setDefaultAudioSource(
QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
emit voice();
emit close();
});
connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
emit cancel();
emit close();
});
}
}

@ -0,0 +1,37 @@
#pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget>
class QPushButton;
class QString;
class UserSettings;
namespace dialogs {
class PlaceCall : public QWidget
{
Q_OBJECT
public:
PlaceCall(const QString &callee,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent = nullptr);
signals:
void voice();
void cancel();
private:
QPushButton *voiceBtn_;
QPushButton *cancelBtn_;
std::vector<std::string> audioDevices_;
};
}

@ -121,6 +121,21 @@ struct RoomEventType
{
return qml_mtx_events::EventType::Redacted;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallInvite> &)
{
return qml_mtx_events::EventType::CallInvite;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallAnswer> &)
{
return qml_mtx_events::EventType::CallAnswer;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallHangUp> &)
{
return qml_mtx_events::EventType::CallHangUp;
}
// ::EventType::Type operator()(const Event<mtx::events::msg::Location> &e) { return
// ::EventType::LocationMessage; }
};
@ -267,6 +282,7 @@ TimelineModel::roleNames() const
{RoomId, "roomId"},
{RoomName, "roomName"},
{RoomTopic, "roomTopic"},
{CallType, "callType"},
{Dump, "dump"},
};
}
@ -420,6 +436,8 @@ TimelineModel::data(const QString &id, int role) const
return QVariant(QString::fromStdString(room_name(event)));
case RoomTopic:
return QVariant(QString::fromStdString(room_topic(event)));
case CallType:
return QVariant(QString::fromStdString(call_type(event)));
case Dump: {
QVariantMap m;
auto names = roleNames();
@ -449,6 +467,7 @@ TimelineModel::data(const QString &id, int role) const
m.insert(names[ReplyTo], data(id, static_cast<int>(ReplyTo)));
m.insert(names[RoomName], data(id, static_cast<int>(RoomName)));
m.insert(names[RoomTopic], data(id, static_cast<int>(RoomTopic)));
m.insert(names[CallType], data(id, static_cast<int>(CallType)));
return QVariant(m);
}
@ -562,7 +581,7 @@ TimelineModel::addEvents(const mtx::responses::Timeline &timeline)
if (timeline.events.empty())
return;
std::vector<QString> ids = internalAddEvents(timeline.events, true);
if (!ids.empty()) {
beginInsertRows(QModelIndex(), 0, static_cast<int>(ids.size() - 1));
@ -596,6 +615,23 @@ isMessage(const mtx::events::EncryptedEvent<T> &)
return true;
}
auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &)
{
return true;
}
auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &)
{
return true;
}
auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &)
{
return true;
}
// Workaround: we also want to see a room at the top if we just joined it
auto
isYourJoin(const mtx::events::StateEvent<mtx::events::state::Member> &e)
@ -647,7 +683,8 @@ TimelineModel::updateLastMessage()
std::vector<QString>
TimelineModel::internalAddEvents(
const std::vector<mtx::events::collections::TimelineEvents> &timeline,
bool emitCallEvents)
{
std::vector<QString> ids;
for (auto e : timeline) {
@ -741,6 +778,55 @@ TimelineModel::internalAddEvents(
if (encInfo)
emit newEncryptedImage(encInfo.value());
if (std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallCandidates>>(e_)) {
// don't display CallCandidate events to user
events.insert(id, e);
if (emitCallEvents)
emit newCallEvent(e_);
continue;
}
if (emitCallEvents) {
if (auto callInvite = std::get_if<
mtx::events::RoomEvent<mtx::events::msg::CallInvite>>(&e_)) {
callInvite->room_id = room_id_.toStdString();
emit newCallEvent(e_);
} else if (std::holds_alternative<mtx::events::RoomEvent<
mtx::events::msg::CallCandidates>>(e_) ||
std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>(
e_) ||
std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>(
e_)) {
emit newCallEvent(e_);
}
}
}
if (std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallCandidates>>(e)) {
// don't display CallCandidate events to user
events.insert(id, e);
if (emitCallEvents)
emit newCallEvent(e);
continue;
}
if (emitCallEvents) {
if (auto callInvite =
std::get_if<mtx::events::RoomEvent<mtx::events::msg::CallInvite>>(
&e)) {
callInvite->room_id = room_id_.toStdString();
emit newCallEvent(e);
} else if (std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>(e) ||
std::holds_alternative<
mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>(e)) {
emit newCallEvent(e);
}
}
this->events.insert(id, e);
@ -798,7 +884,7 @@ TimelineModel::readEvent(const std::string &id)
void
TimelineModel::addBackwardsEvents(const mtx::responses::Messages &msgs)
{
std::vector<QString> ids = internalAddEvents(msgs.chunk, false);
if (!ids.empty()) {
beginInsertRows(QModelIndex(),
@ -1088,14 +1174,17 @@ TimelineModel::markEventsAsRead(const std::vector<QString> &event_ids)
}
void
TimelineModel::sendEncryptedMessageEvent(const std::string &txn_id,
nlohmann::json content,
mtx::events::EventType eventType)
{
const auto room_id = room_id_.toStdString();
using namespace mtx::events;
using namespace mtx::identifiers;
json doc = {
{"type", mtx::events::to_string(eventType)}, {"content", content}, {"room_id", room_id}};
try {
// Check if we already have an outbound megolm session that we can use.
@ -1399,45 +1488,56 @@ struct SendMessageVisitor
, model_(model)
{}
template<typename T, mtx::events::EventType Event>
void sendRoomEvent(const mtx::events::RoomEvent<T> &msg)
{
if (cache::isRoomEncrypted(model_->room_id_.toStdString())) {
auto encInfo = mtx::accessors::file(msg);
if (encInfo)
emit model_->newEncryptedImage(encInfo.value());
model_->sendEncryptedMessageEvent(
txn_id_qstr_.toStdString(), nlohmann::json(msg.content), Event);
} else {
sendUnencryptedRoomEvent<T, Event>(msg);
}
}
template<typename T, mtx::events::EventType Event>
void sendUnencryptedRoomEvent(const mtx::events::RoomEvent<T> &msg)
{
QString txn_id_qstr = txn_id_qstr_;
TimelineModel *model = model_;
http::client()->send_room_message<T, Event>(
model->room_id_.toStdString(),
txn_id_qstr.toStdString(),
msg.content,
[txn_id_qstr, model](const mtx::responses::EventId &res,
mtx::http::RequestErr err) {
if (err) {
const int status_code = static_cast<int>(err->status_code);
nhlog::net()->warn("[{}] failed to send message: {} {}",
txn_id_qstr.toStdString(),
err->matrix_error.error,
status_code);
emit model->messageFailed(txn_id_qstr);
}
emit model->messageSent(txn_id_qstr,
QString::fromStdString(res.event_id.to_string()));
});
}
// Do-nothing operator for all unhandled events
template<typename T>
void operator()(const mtx::events::Event<T> &)
{}
// Operator for m.room.message events that contain a msgtype in their content
template<typename T,
std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
void operator()(const mtx::events::RoomEvent<T> &msg)
{
sendRoomEvent<T, mtx::events::EventType::RoomMessage>(msg);
}
// Special operator for reactions, which are a type of m.room.message, but need to be
@ -1446,28 +1546,33 @@ struct SendMessageVisitor
// cannot handle it correctly. See the MSC for more details:
// https://github.com/matrix-org/matrix-doc/blob/matthew/msc1849/proposals/1849-aggregations.md#end-to-end-encryption
void operator()(const mtx::events::RoomEvent<mtx::events::msg::Reaction> &msg)
{
sendUnencryptedRoomEvent<mtx::events::msg::Reaction,
mtx::events::EventType::Reaction>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &event)
{
sendRoomEvent<mtx::events::msg::CallInvite, mtx::events::EventType::CallInvite>(
event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &event)
{
sendRoomEvent<mtx::events::msg::CallCandidates,
mtx::events::EventType::CallCandidates>(event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &event)
{
sendRoomEvent<mtx::events::msg::CallAnswer, mtx::events::EventType::CallAnswer>(
event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &event)
{
sendRoomEvent<mtx::events::msg::CallHangUp, mtx::events::EventType::CallHangUp>(
event);
}
QString txn_id_qstr_;
@ -1491,18 +1596,18 @@ TimelineModel::addPendingMessage(mtx::events::collections::TimelineEvents event)
{
std::visit(
[](auto &msg) {
msg.event_id = http::client()->generate_txn_id();
msg.sender = http::client()->user_id().to_string();
msg.origin_server_ts = QDateTime::currentMSecsSinceEpoch();
},
event);
internalAddEvents({event}, false);
QString txn_id_qstr = QString::fromStdString(mtx::accessors::event_id(event));
pending.push_back(txn_id_qstr);
if (!std::get_if<mtx::events::RoomEvent<mtx::events::msg::Reaction>>(&event) &&
!std::get_if<mtx::events::RoomEvent<mtx::events::msg::CallCandidates>>(&event)) {
beginInsertRows(QModelIndex(), 0, 0);
this->eventOrder.insert(this->eventOrder.begin(), txn_id_qstr);
endInsertRows();

@ -36,6 +36,12 @@ enum EventType
Aliases,
/// m.room.avatar
Avatar,
/// m.call.invite
CallInvite,
/// m.call.answer
CallAnswer,
/// m.call.hangup
CallHangUp,
/// m.room.canonical_alias
CanonicalAlias,
/// m.room.create
@ -164,6 +170,7 @@ public:
RoomId,
RoomName,
RoomTopic,
CallType,
Dump,
};
@ -200,7 +207,7 @@ public:
void updateLastMessage();
void addEvents(const mtx::responses::Timeline &events);
template<class T>
void sendMessageEvent(const T &content, mtx::events::EventType eventType);
RelatedInfo relatedInfo(QString id);
public slots:
@ -255,13 +262,17 @@ signals:
void typingUsersChanged(std::vector<QString> users);
void replyChanged(QString reply);
void paginationInProgressChanged(const bool);
void newCallEvent(const mtx::events::collections::TimelineEvents &event);
private:
DecryptionResult decryptEvent(
const mtx::events::EncryptedEvent<mtx::events::msg::Encrypted> &e) const;
std::vector<QString> internalAddEvents(
const std::vector<mtx::events::collections::TimelineEvents> &timeline,
bool emitCallEvents);
void sendEncryptedMessageEvent(const std::string &txn_id,
nlohmann::json content,
mtx::events::EventType);
void handleClaimedKeys(std::shared_ptr<StateKeeper> keeper,
const std::map<std::string, std::string> &room_key,
const std::map<std::string, DevicePublicKeys> &pks,
@ -296,9 +307,10 @@ private:
template<class T>
void
TimelineModel::sendMessageEvent(const T &content, mtx::events::EventType eventType)
{
mtx::events::RoomEvent<T> msgCopy = {};
msgCopy.content = content;
msgCopy.type = eventType;
emit newMessageToSend(msgCopy);
}

@ -4,8 +4,10 @@
#include <QMetaType>
#include <QPalette>
#include <QQmlContext>
#include <QString>
#include "BlurhashProvider.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "ColorImageProvider.h"
#include "DelegateChooser.h"
@ -72,10 +74,13 @@ TimelineViewManager::userStatus(QString id) const
return QString::fromStdString(cache::statusMessage(id.toStdString()));
}
TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings,
CallManager *callManager,
QWidget *parent)
: imgProvider(new MxcImageProvider())
, colorImgProvider(new ColorImageProvider())
, blurhashProvider(new BlurhashProvider())
, callManager_(callManager)
, settings(userSettings)
{
qmlRegisterUncreatableMetaObject(qml_mtx_events::staticMetaObject,
@ -134,6 +139,10 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
&ChatPage::decryptSidebarChanged,
this,
&TimelineViewManager::updateEncryptedDescriptions);
connect(dynamic_cast<ChatPage *>(parent), &ChatPage::loggedOut, this, [this]() {
isInitialSync_ = true;
emit initialSyncChanged(true);
});
}
void
@ -143,7 +152,17 @@ TimelineViewManager::sync(const mtx::responses::Rooms &rooms)
// addRoom will only add the room, if it doesn't exist
addRoom(QString::fromStdString(room_id));
const auto &room_model = models.value(QString::fromStdString(room_id));
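// forward call events to the CallManager only during live syncs; presumably this
// avoids acting on m.call.* events replayed as part of the initial sync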
if (!isInitialSync_)
connect(room_model.data(),
&TimelineModel::newCallEvent,
callManager_,
&CallManager::syncEvent);
room_model->addEvents(room.timeline);
if (!isInitialSync_)
disconnect(room_model.data(),
&TimelineModel::newCallEvent,
callManager_,
&CallManager::syncEvent);
if (ChatPage::instance()->userSettings()->typingNotifications()) {
std::vector<QString> typing;
@ -295,7 +314,7 @@ TimelineViewManager::queueTextMessage(const QString &msg)
timeline_->resetReply();
}
timeline_->sendMessageEvent(text, mtx::events::EventType::RoomMessage);
}
void
@ -317,7 +336,7 @@ TimelineViewManager::queueEmoteMessage(const QString &msg)
}
if (timeline_)
timeline_->sendMessageEvent(emote, mtx::events::EventType::RoomMessage);
}
void
@ -347,7 +366,7 @@ TimelineViewManager::queueReactionMessage(const QString &roomId,
reaction.relates_to.key = reactionKey.toStdString();
auto model = models.value(roomId);
model->sendMessageEvent(reaction, mtx::events::EventType::RoomMessage);
}
void
@ -376,7 +395,7 @@ TimelineViewManager::queueImageMessage(const QString &roomid,
model->resetReply();
}
model->sendMessageEvent(image, mtx::events::EventType::RoomMessage);
}
void
@ -401,7 +420,7 @@ TimelineViewManager::queueFileMessage(
model->resetReply();
}
model->sendMessageEvent(file, mtx::events::EventType::RoomMessage);
}
void
@ -425,7 +444,7 @@ TimelineViewManager::queueAudioMessage(const QString &roomid,
model->resetReply();
}
model->sendMessageEvent(audio, mtx::events::EventType::RoomMessage);
}
void
@ -449,5 +468,34 @@ TimelineViewManager::queueVideoMessage(const QString &roomid,
model->resetReply();
}
model->sendMessageEvent(video, mtx::events::EventType::RoomMessage);
}
void
TimelineViewManager::queueCallMessage(const QString &roomid,
const mtx::events::msg::CallInvite &callInvite)
{
models.value(roomid)->sendMessageEvent(callInvite, mtx::events::EventType::CallInvite);
}
void
TimelineViewManager::queueCallMessage(const QString &roomid,
const mtx::events::msg::CallCandidates &callCandidates)
{
models.value(roomid)->sendMessageEvent(callCandidates,
mtx::events::EventType::CallCandidates);
}
void
TimelineViewManager::queueCallMessage(const QString &roomid,
const mtx::events::msg::CallAnswer &callAnswer)
{
models.value(roomid)->sendMessageEvent(callAnswer, mtx::events::EventType::CallAnswer);
}
void
TimelineViewManager::queueCallMessage(const QString &roomid,
const mtx::events::msg::CallHangUp &callHangUp)
{
models.value(roomid)->sendMessageEvent(callHangUp, mtx::events::EventType::CallHangUp);
}

@ -18,6 +18,7 @@
class MxcImageProvider;
class BlurhashProvider;
class CallManager;
class ColorImageProvider;
class UserSettings;
@ -31,7 +32,9 @@ class TimelineViewManager : public QObject
bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
public:
TimelineViewManager(QSharedPointer<UserSettings> userSettings,
CallManager *callManager,
QWidget *parent = nullptr);
QWidget *getWidget() const { return container; }
void sync(const mtx::responses::Rooms &rooms);
@ -98,6 +101,11 @@ public slots:
const QString &url,
const QString &mime,
uint64_t dsize);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void updateEncryptedDescriptions();
private:
@ -114,6 +122,7 @@ private:
QHash<QString, QSharedPointer<TimelineModel>> models;
TimelineModel *timeline_ = nullptr;
CallManager *callManager_ = nullptr;
bool isInitialSync_ = true;
