Add audio input device selector

pull/237/head
trilene 4 years ago
parent 02dfc8039f
commit df65093374
  1. 1
      src/CallManager.cpp
  2. 1
      src/ChatPage.cpp
  3. 19
      src/UserSettingsPage.cpp
  4. 7
      src/UserSettingsPage.h
  5. 116
      src/WebRTCSession.cpp
  6. 7
      src/WebRTCSession.h
  7. 44
      src/dialogs/AcceptCall.cpp
  8. 7
      src/dialogs/AcceptCall.h
  9. 42
      src/dialogs/PlaceCall.cpp
  10. 7
      src/dialogs/PlaceCall.h

@ -264,6 +264,7 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
caller.display_name, caller.display_name,
QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url), QString::fromStdString(roomInfo.avatar_url),
settings_,
MainWindow::instance()); MainWindow::instance());
connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() { connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
MainWindow::instance()->hideOverlay(); MainWindow::instance()->hideOverlay();

@ -474,6 +474,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
callee.display_name, callee.display_name,
QString::fromStdString(roomInfo.name), QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url), QString::fromStdString(roomInfo.avatar_url),
userSettings_,
MainWindow::instance()); MainWindow::instance());
connect(dialog, &dialogs::PlaceCall::voice, this, [this]() { connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
callManager_.sendInvite(current_room_); callManager_.sendInvite(current_room_);

@ -78,6 +78,7 @@ UserSettings::load()
settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence)) settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
.value<Presence>(); .value<Presence>();
useStunServer_ = settings.value("user/use_stun_server", false).toBool(); useStunServer_ = settings.value("user/use_stun_server", false).toBool();
defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString();
applyTheme(); applyTheme();
} }
@ -290,6 +291,16 @@ UserSettings::setUseStunServer(bool useStunServer)
save(); save();
} }
// Persist the user's preferred audio capture device and notify listeners.
// No-op (and no disk write) when the value is unchanged.
void
UserSettings::setDefaultAudioSource(const QString &defaultAudioSource)
{
        if (defaultAudioSource_ == defaultAudioSource)
                return;

        defaultAudioSource_ = defaultAudioSource;
        emit defaultAudioSourceChanged(defaultAudioSource);
        save();
}
void void
UserSettings::applyTheme() UserSettings::applyTheme()
{ {
@ -376,6 +387,7 @@ UserSettings::save()
settings.setValue("emoji_font_family", emojiFont_); settings.setValue("emoji_font_family", emojiFont_);
settings.setValue("presence", QVariant::fromValue(presence_)); settings.setValue("presence", QVariant::fromValue(presence_));
settings.setValue("use_stun_server", useStunServer_); settings.setValue("use_stun_server", useStunServer_);
settings.setValue("default_audio_source", defaultAudioSource_);
settings.endGroup(); settings.endGroup();
@ -501,6 +513,9 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
callsLabel->setFont(font); callsLabel->setFont(font);
useStunServer_ = new Toggle{this}; useStunServer_ = new Toggle{this};
defaultAudioSourceValue_ = new QLabel(this);
defaultAudioSourceValue_->setFont(font);
auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this}; auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this};
encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin); encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin);
encryptionLabel_->setAlignment(Qt::AlignBottom); encryptionLabel_->setAlignment(Qt::AlignBottom);
@ -634,9 +649,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
formLayout_->addRow(callsLabel); formLayout_->addRow(callsLabel);
formLayout_->addRow(new HorizontalLine{this}); formLayout_->addRow(new HorizontalLine{this});
boxWrap(tr("Allow Fallback Call Assist Server"), boxWrap(tr("Allow fallback call assist server"),
useStunServer_, useStunServer_,
tr("Will use turn.matrix.org as assist when your home server does not offer one.")); tr("Will use turn.matrix.org as assist when your home server does not offer one."));
boxWrap(tr("Default audio source device"), defaultAudioSourceValue_);
formLayout_->addRow(encryptionLabel_); formLayout_->addRow(encryptionLabel_);
formLayout_->addRow(new HorizontalLine{this}); formLayout_->addRow(new HorizontalLine{this});
@ -797,6 +813,7 @@ UserSettingsPage::showEvent(QShowEvent *)
deviceIdValue_->setText(QString::fromStdString(http::client()->device_id())); deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth()); timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
useStunServer_->setState(!settings_->useStunServer()); useStunServer_->setState(!settings_->useStunServer());
defaultAudioSourceValue_->setText(settings_->defaultAudioSource());
deviceFingerprintValue_->setText( deviceFingerprintValue_->setText(
utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519)); utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519));

@ -73,6 +73,8 @@ class UserSettings : public QObject
Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged) Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
Q_PROPERTY( Q_PROPERTY(
bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged) bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)
Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource
NOTIFY defaultAudioSourceChanged)
public: public:
UserSettings(); UserSettings();
@ -110,6 +112,7 @@ public:
void setDecryptSidebar(bool state); void setDecryptSidebar(bool state);
void setPresence(Presence state); void setPresence(Presence state);
void setUseStunServer(bool state); void setUseStunServer(bool state);
void setDefaultAudioSource(const QString &deviceName);
QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; } QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
bool messageHoverHighlight() const { return messageHoverHighlight_; } bool messageHoverHighlight() const { return messageHoverHighlight_; }
@ -136,6 +139,7 @@ public:
QString emojiFont() const { return emojiFont_; } QString emojiFont() const { return emojiFont_; }
Presence presence() const { return presence_; } Presence presence() const { return presence_; }
bool useStunServer() const { return useStunServer_; } bool useStunServer() const { return useStunServer_; }
QString defaultAudioSource() const { return defaultAudioSource_; }
signals: signals:
void groupViewStateChanged(bool state); void groupViewStateChanged(bool state);
@ -159,6 +163,7 @@ signals:
void emojiFontChanged(QString state); void emojiFontChanged(QString state);
void presenceChanged(Presence state); void presenceChanged(Presence state);
void useStunServerChanged(bool state); void useStunServerChanged(bool state);
void defaultAudioSourceChanged(const QString &deviceName);
private: private:
// Default to system theme if QT_QPA_PLATFORMTHEME var is set. // Default to system theme if QT_QPA_PLATFORMTHEME var is set.
@ -187,6 +192,7 @@ private:
QString emojiFont_; QString emojiFont_;
Presence presence_; Presence presence_;
bool useStunServer_; bool useStunServer_;
QString defaultAudioSource_;
}; };
class HorizontalLine : public QFrame class HorizontalLine : public QFrame
@ -244,6 +250,7 @@ private:
Toggle *decryptSidebar_; Toggle *decryptSidebar_;
QLabel *deviceFingerprintValue_; QLabel *deviceFingerprintValue_;
QLabel *deviceIdValue_; QLabel *deviceIdValue_;
QLabel *defaultAudioSourceValue_;
QComboBox *themeCombo_; QComboBox *themeCombo_;
QComboBox *scaleFactorCombo_; QComboBox *scaleFactorCombo_;

@ -487,23 +487,74 @@ WebRTCSession::startPipeline(int opusPayloadType)
return true; return true;
} }
#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload="
bool bool
WebRTCSession::createPipeline(int opusPayloadType) WebRTCSession::createPipeline(int opusPayloadType)
{ {
std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin " int nSources = audioSources_ ? g_list_length(audioSources_) : 0;
"autoaudiosrc ! volume name=srclevel ! audioconvert ! " if (nSources == 0) {
"audioresample ! queue ! opusenc ! rtpopuspay ! " nhlog::ui()->error("WebRTC: no audio sources");
"queue ! " RTP_CAPS_OPUS + return false;
std::to_string(opusPayloadType) + " ! webrtcbin."); }
webrtc_ = nullptr; if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) {
GError *error = nullptr; nhlog::ui()->error("WebRTC: invalid audio source index");
pipe_ = gst_parse_launch(pipeline.c_str(), &error); return false;
if (error) { }
nhlog::ui()->error("WebRTC: failed to parse pipeline: {}", error->message);
g_error_free(error); GstElement *source = gst_device_create_element(
GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr);
GstElement *volume = gst_element_factory_make("volume", "srclevel");
GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
GstElement *resample = gst_element_factory_make("audioresample", nullptr);
GstElement *queue1 = gst_element_factory_make("queue", nullptr);
GstElement *opusenc = gst_element_factory_make("opusenc", nullptr);
GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr);
GstElement *queue2 = gst_element_factory_make("queue", nullptr);
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
"media",
G_TYPE_STRING,
"audio",
"encoding-name",
G_TYPE_STRING,
"OPUS",
"payload",
G_TYPE_INT,
opusPayloadType,
nullptr);
g_object_set(capsfilter, "caps", rtpcaps, nullptr);
gst_caps_unref(rtpcaps);
GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);
pipe_ = gst_pipeline_new(nullptr);
gst_bin_add_many(GST_BIN(pipe_),
source,
volume,
convert,
resample,
queue1,
opusenc,
rtp,
queue2,
capsfilter,
webrtcbin,
nullptr);
if (!gst_element_link_many(source,
volume,
convert,
resample,
queue1,
opusenc,
rtp,
queue2,
capsfilter,
webrtcbin,
nullptr)) {
nhlog::ui()->error("WebRTC: failed to link pipeline elements");
end(); end();
return false; return false;
} }
@ -541,3 +592,42 @@ WebRTCSession::end()
if (state_ != State::DISCONNECTED) if (state_ != State::DISCONNECTED)
emit stateChanged(State::DISCONNECTED); emit stateChanged(State::DISCONNECTED);
} }
// Re-enumerate the available audio capture devices into audioSources_.
// Does nothing until the GStreamer session has been initialised.
void
WebRTCSession::refreshDevices()
{
        if (!initialised_)
                return;

        // One process-wide monitor, created lazily and restricted to
        // raw-audio capture devices ("Audio/Source", audio/x-raw).
        static GstDeviceMonitor *monitor = nullptr;
        if (monitor == nullptr) {
                monitor = gst_device_monitor_new();
                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
                gst_caps_unref(caps);
        }

        // Release the previous snapshot (each entry is a ref-counted
        // GstDevice) before taking a fresh one.
        g_list_free_full(audioSources_, g_object_unref);
        audioSources_ = gst_device_monitor_get_devices(monitor);
}
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
{
if (!initialised_)
return {};
refreshDevices();
std::vector<std::string> ret;
ret.reserve(g_list_length(audioSources_));
for (GList *l = audioSources_; l != nullptr; l = l->next) {
gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
ret.emplace_back(name);
g_free(name);
if (ret.back() == defaultDevice) {
// move default device to top of the list
std::swap(audioSources_->data, l->data);
std::swap(ret.front(), ret.back());
}
}
return ret;
}

@ -7,6 +7,7 @@
#include "mtx/events/voip.hpp" #include "mtx/events/voip.hpp"
typedef struct _GList GList;
typedef struct _GstElement GstElement; typedef struct _GstElement GstElement;
class WebRTCSession : public QObject class WebRTCSession : public QObject
@ -46,6 +47,9 @@ public:
void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; } void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; } void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }
signals: signals:
void offerCreated(const std::string &sdp, void offerCreated(const std::string &sdp,
const std::vector<mtx::events::msg::CallCandidates::Candidate> &); const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
@ -66,9 +70,12 @@ private:
GstElement *webrtc_ = nullptr; GstElement *webrtc_ = nullptr;
std::string stunServer_; std::string stunServer_;
std::vector<std::string> turnServers_; std::vector<std::string> turnServers_;
GList *audioSources_ = nullptr;
int audioSourceIndex_ = -1;
bool startPipeline(int opusPayloadType); bool startPipeline(int opusPayloadType);
bool createPipeline(int opusPayloadType); bool createPipeline(int opusPayloadType);
void refreshDevices();
public: public:
WebRTCSession(WebRTCSession const &) = delete; WebRTCSession(WebRTCSession const &) = delete;

@ -1,11 +1,14 @@
#include <QComboBox>
#include <QLabel> #include <QLabel>
#include <QPixmap>
#include <QPushButton> #include <QPushButton>
#include <QString> #include <QString>
#include <QVBoxLayout> #include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h" #include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h" #include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h" #include "dialogs/AcceptCall.h"
#include "ui/Avatar.h" #include "ui/Avatar.h"
@ -15,9 +18,25 @@ AcceptCall::AcceptCall(const QString &caller,
const QString &displayName, const QString &displayName,
const QString &roomName, const QString &roomName,
const QString &avatarUrl, const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent) QWidget *parent)
: QWidget(parent) : QWidget(parent)
{ {
std::string errorMessage;
if (!WebRTCSession::instance().init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
settings->defaultAudioSource().toStdString());
if (audioDevices_.empty()) {
emit ChatPage::instance()->showNotification(
"Incoming call: No audio sources found.");
emit close();
return;
}
setAutoFillBackground(true); setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal); setWindowModality(Qt::WindowModal);
@ -55,7 +74,7 @@ AcceptCall::AcceptCall(const QString &caller,
else else
avatar->setLetter(utils::firstChar(roomName)); avatar->setLetter(utils::firstChar(roomName));
const int iconSize = 24; const int iconSize = 22;
QLabel *callTypeIndicator = new QLabel(this); QLabel *callTypeIndicator = new QLabel(this);
callTypeIndicator->setPixmap( callTypeIndicator->setPixmap(
QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2))); QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));
@ -66,7 +85,7 @@ AcceptCall::AcceptCall(const QString &caller,
callTypeLabel->setAlignment(Qt::AlignCenter); callTypeLabel->setAlignment(Qt::AlignCenter);
auto buttonLayout = new QHBoxLayout; auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(20); buttonLayout->setSpacing(18);
acceptBtn_ = new QPushButton(tr("Accept"), this); acceptBtn_ = new QPushButton(tr("Accept"), this);
acceptBtn_->setDefault(true); acceptBtn_->setDefault(true);
acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
@ -78,6 +97,19 @@ AcceptCall::AcceptCall(const QString &caller,
buttonLayout->addWidget(acceptBtn_); buttonLayout->addWidget(acceptBtn_);
buttonLayout->addWidget(rejectBtn_); buttonLayout->addWidget(rejectBtn_);
auto deviceLayout = new QHBoxLayout;
auto audioLabel = new QLabel(this);
audioLabel->setPixmap(
QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));
auto deviceList = new QComboBox(this);
for (const auto &d : audioDevices_)
deviceList->addItem(QString::fromStdString(d));
deviceLayout->addStretch();
deviceLayout->addWidget(audioLabel);
deviceLayout->addWidget(deviceList);
if (displayNameLabel) if (displayNameLabel)
layout->addWidget(displayNameLabel, 0, Qt::AlignCenter); layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
layout->addWidget(callerLabel, 0, Qt::AlignCenter); layout->addWidget(callerLabel, 0, Qt::AlignCenter);
@ -85,8 +117,12 @@ AcceptCall::AcceptCall(const QString &caller,
layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter); layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
layout->addWidget(callTypeLabel, 0, Qt::AlignCenter); layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
layout->addLayout(buttonLayout); layout->addLayout(buttonLayout);
layout->addLayout(deviceLayout);
connect(acceptBtn_, &QPushButton::clicked, this, [this]() { connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
settings->setDefaultAudioSource(
QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
emit accept(); emit accept();
emit close(); emit close();
}); });

@ -1,9 +1,14 @@
#pragma once #pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget> #include <QWidget>
class QPushButton; class QPushButton;
class QString; class QString;
class UserSettings;
namespace dialogs { namespace dialogs {
@ -16,6 +21,7 @@ public:
const QString &displayName, const QString &displayName,
const QString &roomName, const QString &roomName,
const QString &avatarUrl, const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent = nullptr); QWidget *parent = nullptr);
signals: signals:
@ -25,6 +31,7 @@ signals:
private: private:
QPushButton *acceptBtn_; QPushButton *acceptBtn_;
QPushButton *rejectBtn_; QPushButton *rejectBtn_;
std::vector<std::string> audioDevices_;
}; };
} }

@ -1,10 +1,14 @@
#include <QComboBox>
#include <QLabel> #include <QLabel>
#include <QPushButton> #include <QPushButton>
#include <QString> #include <QString>
#include <QVBoxLayout> #include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h" #include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h" #include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/PlaceCall.h" #include "dialogs/PlaceCall.h"
#include "ui/Avatar.h" #include "ui/Avatar.h"
@ -14,9 +18,24 @@ PlaceCall::PlaceCall(const QString &callee,
const QString &displayName, const QString &displayName,
const QString &roomName, const QString &roomName,
const QString &avatarUrl, const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent) QWidget *parent)
: QWidget(parent) : QWidget(parent)
{ {
std::string errorMessage;
if (!WebRTCSession::instance().init(&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
settings->defaultAudioSource().toStdString());
if (audioDevices_.empty()) {
emit ChatPage::instance()->showNotification("No audio sources found.");
emit close();
return;
}
setAutoFillBackground(true); setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint); setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal); setWindowModality(Qt::WindowModal);
@ -37,25 +56,42 @@ PlaceCall::PlaceCall(const QString &callee,
avatar->setImage(avatarUrl); avatar->setImage(avatarUrl);
else else
avatar->setLetter(utils::firstChar(roomName)); avatar->setLetter(utils::firstChar(roomName));
const int iconSize = 24; const int iconSize = 18;
voiceBtn_ = new QPushButton(tr("Voice"), this); voiceBtn_ = new QPushButton(tr("Voice"), this);
voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png")); voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
voiceBtn_->setIconSize(QSize(iconSize, iconSize)); voiceBtn_->setIconSize(QSize(iconSize, iconSize));
voiceBtn_->setDefault(true); voiceBtn_->setDefault(true);
cancelBtn_ = new QPushButton(tr("Cancel"), this); cancelBtn_ = new QPushButton(tr("Cancel"), this);
buttonLayout->addStretch(1);
buttonLayout->addWidget(avatar); buttonLayout->addWidget(avatar);
buttonLayout->addStretch();
buttonLayout->addWidget(voiceBtn_); buttonLayout->addWidget(voiceBtn_);
buttonLayout->addWidget(cancelBtn_); buttonLayout->addWidget(cancelBtn_);
QString name = displayName.isEmpty() ? callee : displayName; QString name = displayName.isEmpty() ? callee : displayName;
QLabel *label = new QLabel("Place a call to " + name + "?", this); QLabel *label = new QLabel("Place a call to " + name + "?", this);
auto deviceLayout = new QHBoxLayout;
auto audioLabel = new QLabel(this);
audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
.pixmap(QSize(iconSize * 1.2, iconSize * 1.2)));
auto deviceList = new QComboBox(this);
for (const auto &d : audioDevices_)
deviceList->addItem(QString::fromStdString(d));
deviceLayout->addStretch();
deviceLayout->addWidget(audioLabel);
deviceLayout->addWidget(deviceList);
layout->addWidget(label); layout->addWidget(label);
layout->addLayout(buttonLayout); layout->addLayout(buttonLayout);
layout->addLayout(deviceLayout);
connect(voiceBtn_, &QPushButton::clicked, this, [this]() { connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
settings->setDefaultAudioSource(
QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
emit voice(); emit voice();
emit close(); emit close();
}); });

@ -1,9 +1,14 @@
#pragma once #pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget> #include <QWidget>
class QPushButton; class QPushButton;
class QString; class QString;
class UserSettings;
namespace dialogs { namespace dialogs {
@ -16,6 +21,7 @@ public:
const QString &displayName, const QString &displayName,
const QString &roomName, const QString &roomName,
const QString &avatarUrl, const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent = nullptr); QWidget *parent = nullptr);
signals: signals:
@ -25,6 +31,7 @@ signals:
private: private:
QPushButton *voiceBtn_; QPushButton *voiceBtn_;
QPushButton *cancelBtn_; QPushButton *cancelBtn_;
std::vector<std::string> audioDevices_;
}; };
} }

Loading…
Cancel
Save