Allow choice of single window when sharing screen

Branch: pull/484/head
Author: trilene, 4 years ago
Parent: 70c77cdc44
Commit: efe240d609
  1. CMakeLists.txt (5 lines changed)
  2. resources/qml/voip/ScreenShare.qml (37 lines changed)
  3. src/CallManager.cpp (83 lines changed)
  4. src/CallManager.h (4 lines changed)
  5. src/WebRTCSession.cpp (23 lines changed)
  6. src/WebRTCSession.h (3 lines changed)

@@ -448,6 +448,7 @@ endif()
include(FindPkgConfig)
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18)
if (TARGET PkgConfig::GSTREAMER)
pkg_check_modules(XCB IMPORTED_TARGET xcb xcb-ewmh)
add_feature_info(voip ON "GStreamer found. Call support is enabled automatically.")
else()
add_feature_info(voip OFF "GStreamer could not be found on your system. As a consequence call support has been disabled. If you don't want that, make sure gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18 can be found via pkgconfig.")
@@ -637,6 +638,10 @@ endif()
if (TARGET PkgConfig::GSTREAMER)
target_link_libraries(nheko PRIVATE PkgConfig::GSTREAMER)
target_compile_definitions(nheko PRIVATE GSTREAMER_AVAILABLE)
if (TARGET PkgConfig::XCB)
target_link_libraries(nheko PRIVATE PkgConfig::XCB)
target_compile_definitions(nheko PRIVATE XCB_AVAILABLE)
endif()
endif()
if(MSVC)

@@ -13,9 +13,6 @@ Popup {
anchors.centerIn = parent;
frameRateCombo.currentIndex = frameRateCombo.find(Settings.screenShareFrameRate);
pipCheckBox.checked = Settings.screenSharePiP;
remoteVideoCheckBox.checked = Settings.screenShareRemoteVideo;
hideCursorCheckBox.checked = Settings.screenShareHideCursor;
}
palette: colors
@@ -33,6 +30,27 @@ Popup {
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
Label {
Layout.alignment: Qt.AlignLeft
text: qsTr("Window:")
color: colors.windowText
}
ComboBox {
id: windowCombo
Layout.fillWidth: true
model: CallManager.windowList()
}
}
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
Label {
Layout.alignment: Qt.AlignLeft
@@ -43,7 +61,7 @@ Popup {
ComboBox {
id: frameRateCombo
Layout.alignment: Qt.AlignRight
Layout.fillWidth: true
model: ["25", "20", "15", "10", "5", "2", "1"]
}
@@ -52,7 +70,8 @@ Popup {
CheckBox {
id: pipCheckBox
visible: CallManager.cameras.length > 0
enabled: CallManager.cameras.length > 0
checked: Settings.screenSharePiP
Layout.alignment: Qt.AlignLeft
Layout.leftMargin: 8
Layout.rightMargin: 8
@@ -66,6 +85,7 @@ Popup {
Layout.leftMargin: 8
Layout.rightMargin: 8
text: qsTr("Request remote camera")
checked: Settings.screenShareRemoteVideo
ToolTip.text: qsTr("View your callee's camera like a regular video call")
ToolTip.visible: hovered
}
@@ -76,7 +96,9 @@ Popup {
Layout.alignment: Qt.AlignLeft
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
text: qsTr("Hide mouse cursor")
checked: Settings.screenShareHideCursor
}
RowLayout {
@@ -92,11 +114,14 @@ Popup {
onClicked: {
if (buttonLayout.validateMic()) {
Settings.microphone = micCombo.currentText;
if (pipCheckBox.checked)
Settings.camera = cameraCombo.currentText;
Settings.screenShareFrameRate = frameRateCombo.currentText;
Settings.screenSharePiP = pipCheckBox.checked;
Settings.screenShareRemoteVideo = remoteVideoCheckBox.checked;
Settings.screenShareHideCursor = hideCursorCheckBox.checked;
CallManager.sendInvite(TimelineManager.timeline.roomId(), CallType.SCREEN);
CallManager.sendInvite(TimelineManager.timeline.roomId(), CallType.SCREEN, windowCombo.currentIndex);
close();
}
}

@@ -3,6 +3,7 @@
#include <chrono>
#include <cstdint>
#include <cstdlib>
#include <memory>
#include <QMediaPlaylist>
#include <QUrl>
@@ -18,6 +19,11 @@
#include "mtx/responses/turn_server.hpp"
#ifdef XCB_AVAILABLE
#include <xcb/xcb.h>
#include <xcb/xcb_ewmh.h>
#endif
Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>)
Q_DECLARE_METATYPE(mtx::events::msg::CallCandidates::Candidate)
Q_DECLARE_METATYPE(mtx::responses::TurnServer)
@@ -151,12 +157,18 @@ CallManager::CallManager(QObject *parent)
}
void
CallManager::sendInvite(const QString &roomid, CallType callType)
CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int windowIndex)
{
if (isOnCall())
return;
if (callType == CallType::SCREEN && !screenShareSupported())
return;
if (callType == CallType::SCREEN) {
if (!screenShareSupported())
return;
if (windows_.empty() || windowIndex >= windows_.size()) {
nhlog::ui()->error("WebRTC: window index out of range");
return;
}
}
auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
if (roomInfo.member_count != 2) {
@@ -187,7 +199,7 @@ CallManager::sendInvite(const QString &roomid, CallType callType)
callPartyAvatarUrl_ = QString::fromStdString(roomInfo.avatar_url);
emit newInviteState();
playRingtone(QUrl("qrc:/media/media/ringback.ogg"), true);
if (!session_.createOffer(callType)) {
if (!session_.createOffer(callType, windows_[windowIndex].second)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
endCall();
}
@@ -490,6 +502,69 @@ CallManager::stopRingtone()
player_.setPlaylist(nullptr);
}
QStringList
CallManager::windowList()
{
windows_.clear();
windows_.push_back({"Entire screen", 0});
#ifdef XCB_AVAILABLE
std::unique_ptr<xcb_connection_t, std::function<void(xcb_connection_t *)>> connection(
xcb_connect(nullptr, nullptr), [](xcb_connection_t *c) { xcb_disconnect(c); });
if (xcb_connection_has_error(connection.get())) {
nhlog::ui()->error("Failed to connect to X server");
return {};
}
xcb_ewmh_connection_t ewmh;
if (!xcb_ewmh_init_atoms_replies(
&ewmh, xcb_ewmh_init_atoms(connection.get(), &ewmh), nullptr)) {
nhlog::ui()->error("Failed to connect to EWMH server");
return {};
}
std::unique_ptr<xcb_ewmh_connection_t, std::function<void(xcb_ewmh_connection_t *)>>
ewmhconnection(&ewmh, [](xcb_ewmh_connection_t *c) { xcb_ewmh_connection_wipe(c); });
for (int i = 0; i < ewmh.nb_screens; i++) {
xcb_ewmh_get_windows_reply_t clients;
if (!xcb_ewmh_get_client_list_reply(
&ewmh, xcb_ewmh_get_client_list(&ewmh, i), &clients, nullptr)) {
nhlog::ui()->error("Failed to request window list");
return {};
}
for (uint32_t w = 0; w < clients.windows_len; w++) {
xcb_window_t window = clients.windows[w];
std::string name;
xcb_ewmh_get_utf8_strings_reply_t data;
auto getName = [](xcb_ewmh_get_utf8_strings_reply_t *r) {
std::string name(r->strings, r->strings_len);
xcb_ewmh_get_utf8_strings_reply_wipe(r);
return name;
};
xcb_get_property_cookie_t cookie = xcb_ewmh_get_wm_name(&ewmh, window);
if (xcb_ewmh_get_wm_name_reply(&ewmh, cookie, &data, nullptr))
name = getName(&data);
cookie = xcb_ewmh_get_wm_visible_name(&ewmh, window);
if (xcb_ewmh_get_wm_visible_name_reply(&ewmh, cookie, &data, nullptr))
name = getName(&data);
windows_.push_back({QString::fromStdString(name), window});
}
xcb_ewmh_get_windows_reply_wipe(&clients);
}
#endif
QStringList ret;
ret.reserve(windows_.size());
for (const auto &w : windows_)
ret.append(w.first);
return ret;
}
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)
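
Taken together, the new pieces form a small API: windowList() re-enumerates the open windows via xcb-ewmh when XCB_AVAILABLE is defined (always prepending an "Entire screen" entry with window id 0), the ScreenShare dialog displays the returned titles, and the chosen combo-box index is handed back to sendInvite(), which range-checks it and resolves it to the stored window id. A minimal sketch of that round trip from C++; callManager and roomId are placeholders for an existing CallManager instance and the current room id, not part of this commit:

// Sketch only: index 0 always means "Entire screen" (X window id 0).
QStringList titles = callManager->windowList();
// The combo-box index maps 1:1 onto the internal windows_ vector, so it is
// passed through unchanged; out-of-range values are rejected by sendInvite().
unsigned int chosen = 0; // e.g. windowCombo.currentIndex in ScreenShare.qml
callManager->sendInvite(roomId, webrtc::CallType::SCREEN, chosen);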

@@ -55,13 +55,14 @@ public:
static bool screenShareSupported();
public slots:
void sendInvite(const QString &roomid, webrtc::CallType);
void sendInvite(const QString &roomid, webrtc::CallType, unsigned int windowIndex = 0);
void syncEvent(const mtx::events::collections::TimelineEvents &event);
void toggleMicMute();
void toggleLocalPiP() { session_.toggleLocalPiP(); }
void acceptInvite();
void hangUp(
mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
QStringList windowList();
signals:
void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
@@ -91,6 +92,7 @@ private:
std::vector<std::string> turnURIs_;
QTimer turnServerTimer_;
QMediaPlayer player_;
std::vector<std::pair<QString, uint32_t>> windows_;
template<typename T>
bool handleEvent_(const mtx::events::collections::TimelineEvents &event);

@@ -362,7 +362,7 @@ getResolution(GstElement *pipe, const gchar *elementName, const gchar *padName)
}
std::pair<int, int>
getPiPDimensions(const std::pair<int, int> resolution, int fullWidth, double scaleFactor)
getPiPDimensions(const std::pair<int, int> &resolution, int fullWidth, double scaleFactor)
{
int pipWidth = fullWidth * scaleFactor;
int pipHeight = static_cast<double>(resolution.second) / resolution.first * pipWidth;
@@ -629,11 +629,12 @@ WebRTCSession::havePlugins(bool isVideo, std::string *errorMessage)
}
bool
WebRTCSession::createOffer(CallType callType)
WebRTCSession::createOffer(CallType callType, uint32_t shareWindowId)
{
clear();
isOffering_ = true;
callType_ = callType;
isOffering_ = true;
callType_ = callType;
shareWindowId_ = shareWindowId;
// opus and vp8 rtp payload types must be defined dynamically
// therefore from the range [96-127]
@@ -888,15 +889,12 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
if (callType_ == CallType::VIDEO && !devices_.haveCamera())
return !isOffering_;
auto settings = ChatPage::instance()->userSettings();
if (callType_ == CallType::SCREEN && settings->screenSharePiP() && !devices_.haveCamera())
return false;
auto settings = ChatPage::instance()->userSettings();
GstElement *camerafilter = nullptr;
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *tee = gst_element_factory_make("tee", "videosrctee");
gst_bin_add_many(GST_BIN(pipe_), videoconvert, tee, nullptr);
if (callType_ == CallType::VIDEO || settings->screenSharePiP()) {
if (callType_ == CallType::VIDEO || (settings->screenSharePiP() && devices_.haveCamera())) {
std::pair<int, int> resolution;
std::pair<int, int> frameRate;
GstDevice *device = devices_.videoDevice(resolution, frameRate);
@@ -947,7 +945,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
return false;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", 0, nullptr);
g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
g_object_set(
ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
@@ -962,7 +960,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
gst_caps_unref(caps);
gst_bin_add_many(GST_BIN(pipe_), ximagesrc, capsfilter, nullptr);
if (settings->screenSharePiP()) {
if (settings->screenSharePiP() && devices_.haveCamera()) {
GstElement *compositor = gst_element_factory_make("compositor", nullptr);
g_object_set(compositor, "background", 1, nullptr);
gst_bin_add(GST_BIN(pipe_), compositor);
@@ -1101,6 +1099,7 @@ WebRTCSession::clear()
pipe_ = nullptr;
webrtc_ = nullptr;
busWatchId_ = 0;
shareWindowId_ = 0;
haveAudioStream_ = false;
haveVideoStream_ = false;
localPiPSinkPad_ = nullptr;
@@ -1143,7 +1142,7 @@ WebRTCSession::haveLocalPiP() const
return false;
}
bool WebRTCSession::createOffer(webrtc::CallType) { return false; }
bool WebRTCSession::createOffer(webrtc::CallType, uint32_t) { return false; }
bool
WebRTCSession::acceptOffer(const std::string &)
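
The window choice takes effect through the xid property of GStreamer's ximagesrc element, set in addVideoPipeline() above: the previously hard-coded 0 captures the entire screen, while a non-zero X11 window id restricts capture to that single window. An illustrative, self-contained sketch of the same property setup; shareWindowId stands in for windows_[windowIndex].second, and the guint64 parameter reflects ximagesrc exposing xid as a 64-bit property:

#include <gst/gst.h>

// Sketch only: builds a screen/window capture source configured the way the
// diff configures it (show-pointer is hard-coded here for brevity; the commit
// derives it from the screenShareHideCursor setting).
static GstElement *
makeScreenSource(guint64 shareWindowId)
{
    GstElement *src = gst_element_factory_make("ximagesrc", nullptr);
    // use-damage off disables the XDamage-based update optimisation;
    // xid 0 captures the whole screen, otherwise the given X11 window.
    g_object_set(src,
                 "use-damage", FALSE,
                 "xid", shareWindowId,
                 "show-pointer", TRUE,
                 nullptr);
    return src;
}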

@@ -57,7 +57,7 @@ public:
bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
bool createOffer(webrtc::CallType);
bool createOffer(webrtc::CallType, uint32_t shareWindowId);
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
@@ -100,6 +100,7 @@ private:
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;
std::vector<std::string> turnServers_;
uint32_t shareWindowId_ = 0;
bool init(std::string *errorMessage = nullptr);
bool startPipeline(int opusPayloadType, int vp8PayloadType);
