Require GStreamer 1.18 for voip support

pull/484/head
trilene committed 4 years ago
parent 8ccd2abc6a
commit c461c0aac0
9 changed files:

  1. CMakeLists.txt (4 changed lines)
  2. resources/qml/MessageInput.qml (1 changed line)
  3. resources/qml/voip/CallInviteBar.qml (1 changed line)
  4. src/CallDevices.cpp (44 changed lines)
  5. src/CallDevices.h (1 changed line)
  6. src/CallManager.cpp (1 changed line)
  7. src/CallManager.h (1 changed line)
  8. src/UserSettingsPage.cpp (1 changed line)
  9. src/WebRTCSession.cpp (46 changed lines)

CMakeLists.txt:

@@ -446,11 +446,11 @@ else()
 endif()
 include(FindPkgConfig)
-pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.16 gstreamer-webrtc-1.0>=1.16)
+pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18)
 if (TARGET PkgConfig::GSTREAMER)
     add_feature_info(voip ON "GStreamer found. Call support is enabled automatically.")
 else()
-    add_feature_info(voip OFF "GStreamer could not be found on your system. As a consequence call support has been disabled. If you don't want that, make sure gstreamer-sdp-1.0>=1.16 gstreamer-webrtc-1.0>=1.16 can be found via pkgconfig.")
+    add_feature_info(voip OFF "GStreamer could not be found on your system. As a consequence call support has been disabled. If you don't want that, make sure gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18 can be found via pkgconfig.")
 endif()

 # single instance functionality
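Raising the pkg-config floor is what lets the rest of this commit delete its GST_CHECK_VERSION(1, 18, 0) guards: once configure refuses anything older, those branches are provably dead. A minimal sketch of the compile-time versus runtime distinction (not from the commit; checkGstRuntimeVersion is a hypothetical helper):

    #include <gst/gst.h>

    // Compile-time: checks the headers this build sees. With the pkg-config
    // floor above, this assertion can no longer fire.
    static_assert(GST_CHECK_VERSION(1, 18, 0), "GStreamer >= 1.18 headers required");

    // Runtime (hypothetical helper): checks the library actually loaded, which
    // can in principle be older than the headers the binary was built against.
    static bool checkGstRuntimeVersion()
    {
        guint major = 0, minor = 0, micro = 0, nano = 0;
        gst_version(&major, &minor, &micro, &nano);
        return major > 1 || (major == 1 && minor >= 18);
    }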

resources/qml/MessageInput.qml:

@@ -44,7 +44,6 @@ Rectangle {
         } else if (CallManager.isOnCall) {
             CallManager.hangUp();
         } else {
-            CallManager.refreshDevices();
             var dialog = placeCallDialog.createObject(timelineRoot);
             dialog.open();
         }

resources/qml/voip/CallInviteBar.qml:

@@ -75,7 +75,6 @@ Rectangle {
     ToolTip.visible: hovered
     ToolTip.text: qsTr("Devices")
     onClicked: {
-        CallManager.refreshDevices();
         var dialog = devicesDialog.createObject(timelineRoot);
         dialog.open();
     }

src/CallDevices.cpp:

@@ -152,7 +152,6 @@ addDevice(GstDevice *device)
         setDefaultDevice(true);
 }

-#if GST_CHECK_VERSION(1, 18, 0)
 template<typename T>
 bool
 removeDevice(T &sources, GstDevice *device, bool changed)
@@ -212,7 +211,6 @@ newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data G_G
     }
     return TRUE;
 }
-#endif

 template<typename T>
 std::vector<std::string>
@@ -257,7 +255,6 @@ tokenise(std::string_view str, char delim)
 void
 CallDevices::init()
 {
-#if GST_CHECK_VERSION(1, 18, 0)
     static GstDeviceMonitor *monitor = nullptr;
     if (!monitor) {
         monitor = gst_device_monitor_new();
@@ -278,43 +275,6 @@ CallDevices::init()
             return;
         }
     }
-#endif
-}
-
-void
-CallDevices::refresh()
-{
-#if !GST_CHECK_VERSION(1, 18, 0)
-    static GstDeviceMonitor *monitor = nullptr;
-    if (!monitor) {
-        monitor = gst_device_monitor_new();
-        GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
-        gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
-        gst_device_monitor_add_filter(monitor, "Audio/Duplex", caps);
-        gst_caps_unref(caps);
-        caps = gst_caps_new_empty_simple("video/x-raw");
-        gst_device_monitor_add_filter(monitor, "Video/Source", caps);
-        gst_device_monitor_add_filter(monitor, "Video/Duplex", caps);
-        gst_caps_unref(caps);
-    }
-
-    auto clearDevices = [](auto &sources) {
-        std::for_each(
-          sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); });
-        sources.clear();
-    };
-    clearDevices(audioSources_);
-    clearDevices(videoSources_);
-
-    GList *devices = gst_device_monitor_get_devices(monitor);
-    if (devices) {
-        for (GList *l = devices; l != nullptr; l = l->next)
-            addDevice(GST_DEVICE_CAST(l->data));
-        g_list_free(devices);
-    }
-    emit devicesChanged();
-#endif
 }

 bool
@@ -400,10 +360,6 @@ CallDevices::videoDevice(std::pair<int, int> &resolution, std::pair<int, int> &f
 #else
-void
-CallDevices::refresh()
-{}
-
 bool
 CallDevices::haveMic() const
 {
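Why refresh() can be deleted outright: the device monitor created in init() keeps running, and the newBusMessage handler (whose 1.18 guard is removed above) receives DEVICE_ADDED / DEVICE_REMOVED bus messages as hardware comes and goes, so a manual re-enumeration pass has nothing left to do. A condensed, self-contained sketch of that mechanism with hypothetical names (the commit's real handler also updates audioSources_ / videoSources_ and emits devicesChanged()):

    #include <gst/gst.h>

    // Hypothetical bus watch: the monitor posts one message per hotplug event.
    // Assumes gst_init() has run and a GLib main loop is spinning (Qt's default
    // glib event dispatcher on Linux qualifies).
    static gboolean onMonitorMessage(GstBus *, GstMessage *msg, gpointer)
    {
        GstDevice *device = nullptr;
        switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_DEVICE_ADDED:
            gst_message_parse_device_added(msg, &device);
            break;
        case GST_MESSAGE_DEVICE_REMOVED:
            gst_message_parse_device_removed(msg, &device);
            break;
        default:
            return TRUE; // ignore unrelated messages
        }
        gchar *name = gst_device_get_display_name(device);
        g_print("device list changed: %s\n", name);
        g_free(name);
        gst_object_unref(device);
        return TRUE; // keep the watch installed
    }

    static void startDeviceMonitor()
    {
        GstDeviceMonitor *monitor = gst_device_monitor_new();
        GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
        gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
        gst_caps_unref(caps);

        GstBus *bus = gst_device_monitor_get_bus(monitor);
        gst_bus_add_watch(bus, onMonitorMessage, nullptr);
        gst_object_unref(bus);

        gst_device_monitor_start(monitor); // messages flow from here on
    }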

src/CallDevices.h:

@@ -19,7 +19,6 @@ public:
         return instance;
     }

-    void refresh();
     bool haveMic() const;
     bool haveCamera() const;
     std::vector<std::string> names(bool isVideo, const std::string &defaultDevice) const;

src/CallManager.cpp:

@@ -290,7 +290,6 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
     haveCallInvite_ = true;
     callType_ = isVideo ? CallType::VIDEO : CallType::VOICE;
     inviteSDP_ = callInviteEvent.content.sdp;
-    CallDevices::instance().refresh();
     emit newInviteState();
 }

src/CallManager.h:

@@ -59,7 +59,6 @@ public:
 public slots:
     void sendInvite(const QString &roomid, webrtc::CallType);
     void syncEvent(const mtx::events::collections::TimelineEvents &event);
-    void refreshDevices() { CallDevices::instance().refresh(); }
     void toggleMicMute();
     void toggleCameraView() { session_.toggleCameraView(); }
     void acceptInvite();

src/UserSettingsPage.cpp:

@@ -1288,7 +1288,6 @@ UserSettingsPage::showEvent(QShowEvent *)
     timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
     privacyScreenTimeout_->setValue(settings_->privacyScreenTimeout());

-    CallDevices::instance().refresh();
     auto mics = CallDevices::instance().names(false, settings_->microphone().toStdString());
     microphoneCombo_->clear();
     for (const auto &m : mics)

src/WebRTCSession.cpp:

@@ -174,7 +174,6 @@ createAnswer(GstPromise *promise, gpointer webrtc)
     g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
 }

-#if GST_CHECK_VERSION(1, 18, 0)
 void
 iceGatheringStateChanged(GstElement *webrtc,
                          GParamSpec *pspec G_GNUC_UNUSED,
@@ -194,23 +193,6 @@ iceGatheringStateChanged(GstElement *webrtc,
         }
     }
 }
-#else
-gboolean
-onICEGatheringCompletion(gpointer timerid)
-{
-    *(guint *)(timerid) = 0;
-    if (WebRTCSession::instance().isOffering()) {
-        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
-        emit WebRTCSession::instance().stateChanged(State::OFFERSENT);
-    } else {
-        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
-        emit WebRTCSession::instance().stateChanged(State::ANSWERSENT);
-    }
-    return FALSE;
-}
-#endif

 void
 addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                      guint mlineIndex,
@@ -218,28 +200,7 @@ addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                      gpointer G_GNUC_UNUSED)
 {
     nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);
-#if GST_CHECK_VERSION(1, 18, 0)
     localcandidates_.push_back({std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate});
-    return;
-#else
-    if (WebRTCSession::instance().state() >= State::OFFERSENT) {
-        emit WebRTCSession::instance().newICECandidate(
-          {std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate});
-        return;
-    }
-    localcandidates_.push_back({std::string() /*max-bundle*/, (uint16_t)mlineIndex, candidate});
-
-    // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
-    // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18.
-    // Use a 1s timeout in the meantime
-    static guint timerid = 0;
-    if (timerid)
-        g_source_remove(timerid);
-    timerid = g_timeout_add(1000, onICEGatheringCompletion, &timerid);
-#endif
 }

 void
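The deleted #else branch documents the motivation for the bump: on GStreamer 1.16, webrtcbin's notify::ice-gathering-state could signal COMPLETE too early, so completion was approximated with a one-second quiet timer reset on every local candidate. On 1.18 the notification is reliable, and the retained iceGatheringStateChanged handler just reads the property. A minimal standalone sketch of that pattern (onIceGatheringState is a hypothetical name):

    #include <gst/gst.h>
    #include <gst/webrtc/webrtc.h>

    // Hypothetical notify callback: re-read the property and act on COMPLETE.
    static void onIceGatheringState(GstElement *webrtc, GParamSpec *, gpointer)
    {
        GstWebRTCICEGatheringState state;
        g_object_get(webrtc, "ice-gathering-state", &state, nullptr);
        if (state == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE)
            g_print("ICE gathering complete: local SDP and candidates ready\n");
    }

    // Wiring, given a webrtcbin element:
    //   g_signal_connect(webrtc, "notify::ice-gathering-state",
    //                    G_CALLBACK(onIceGatheringState), nullptr);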
@@ -328,7 +289,6 @@ testPacketLoss(gpointer G_GNUC_UNUSED)
     return FALSE;
 }

-#if GST_CHECK_VERSION(1, 18, 0)
 void
 setWaitForKeyFrame(GstBin *decodebin G_GNUC_UNUSED, GstElement *element, gpointer G_GNUC_UNUSED)
 {
@@ -337,7 +297,6 @@ setWaitForKeyFrame(GstBin *decodebin G_GNUC_UNUSED, GstElement *element, gpointe
                     "rtpvp8depay"))
         g_object_set(element, "wait-for-keyframe", TRUE, nullptr);
 }
-#endif

 GstElement *
 newAudioSinkChain(GstElement *pipe)
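setWaitForKeyFrame keeps its body but loses its guard: rtpvp8depay's wait-for-keyframe property does not exist on older GStreamer (hence the old #if), and with 1.18 as the floor the check is redundant. A version-agnostic alternative, sketched here as a hypothetical helper rather than what the commit does, is to probe for the property before setting it:

    #include <gst/gst.h>

    // Hypothetical helper: only set "wait-for-keyframe" if this depayloader
    // actually exposes the property, instead of gating on GST_CHECK_VERSION.
    static void maybeEnableWaitForKeyFrame(GstElement *element)
    {
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(element),
                                         "wait-for-keyframe"))
            g_object_set(element, "wait-for-keyframe", TRUE, nullptr);
    }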
@@ -537,9 +496,7 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
     // hardware decoding needs investigation; eg rendering fails if vaapi plugin installed
     g_object_set(decodebin, "force-sw-decoders", TRUE, nullptr);
     g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
-#if GST_CHECK_VERSION(1, 18, 0)
     g_signal_connect(decodebin, "element-added", G_CALLBACK(setWaitForKeyFrame), nullptr);
-#endif
     gst_bin_add(GST_BIN(pipe), decodebin);
     gst_element_sync_state_with_parent(decodebin);
     GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
@@ -810,11 +767,10 @@ WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType)
     gst_element_set_state(pipe_, GST_STATE_READY);
     g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);

-#if GST_CHECK_VERSION(1, 18, 0)
     // capture ICE gathering completion
     g_signal_connect(
       webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
-#endif

     // webrtcbin lifetime is the same as that of the pipeline
     gst_object_unref(webrtc_);
