|
|
|
@ -1,9 +1,10 @@ |
|
|
|
|
#include <cctype> |
|
|
|
|
|
|
|
|
|
#include "WebRTCSession.h" |
|
|
|
|
#include "Logging.h" |
|
|
|
|
#include "WebRTCSession.h" |
|
|
|
|
|
|
|
|
|
extern "C" { |
|
|
|
|
extern "C" |
|
|
|
|
{ |
|
|
|
|
#include "gst/gst.h" |
|
|
|
|
#include "gst/sdp/sdp.h" |
|
|
|
|
|
|
|
|
@ -13,26 +14,8 @@ extern "C" { |
|
|
|
|
|
|
|
|
|
Q_DECLARE_METATYPE(WebRTCSession::State) |
|
|
|
|
|
|
|
|
|
namespace { |
|
|
|
|
bool isoffering_; |
|
|
|
|
std::string localsdp_; |
|
|
|
|
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_; |
|
|
|
|
|
|
|
|
|
gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data); |
|
|
|
|
GstWebRTCSessionDescription* parseSDP(const std::string &sdp, GstWebRTCSDPType type); |
|
|
|
|
void generateOffer(GstElement *webrtc); |
|
|
|
|
void setLocalDescription(GstPromise *promise, gpointer webrtc); |
|
|
|
|
void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED); |
|
|
|
|
gboolean onICEGatheringCompletion(gpointer timerid); |
|
|
|
|
void iceConnectionStateChanged(GstElement *webrtcbin, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED); |
|
|
|
|
void createAnswer(GstPromise *promise, gpointer webrtc); |
|
|
|
|
void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe); |
|
|
|
|
void linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe); |
|
|
|
|
std::string::const_iterator findName(const std::string &sdp, const std::string &name); |
|
|
|
|
int getPayloadType(const std::string &sdp, const std::string &name); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
WebRTCSession::WebRTCSession() : QObject() |
|
|
|
|
WebRTCSession::WebRTCSession() |
|
|
|
|
: QObject() |
|
|
|
|
{ |
|
|
|
|
qRegisterMetaType<WebRTCSession::State>(); |
|
|
|
|
connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState); |
|
|
|
@ -69,10 +52,20 @@ WebRTCSession::init(std::string *errorMessage) |
|
|
|
|
// libnice [GLib]: nice
|
|
|
|
|
initialised_ = true; |
|
|
|
|
std::string strError = gstVersion + ": Missing plugins: "; |
|
|
|
|
const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice", |
|
|
|
|
"opus", "playback", "rtpmanager", "srtp", "volume", "webrtc", nullptr}; |
|
|
|
|
const gchar *needed[] = {"audioconvert", |
|
|
|
|
"audioresample", |
|
|
|
|
"autodetect", |
|
|
|
|
"dtls", |
|
|
|
|
"nice", |
|
|
|
|
"opus", |
|
|
|
|
"playback", |
|
|
|
|
"rtpmanager", |
|
|
|
|
"srtp", |
|
|
|
|
"volume", |
|
|
|
|
"webrtc", |
|
|
|
|
nullptr}; |
|
|
|
|
GstRegistry *registry = gst_registry_get(); |
|
|
|
|
for (guint i = 0; i < g_strv_length((gchar**)needed); i++) { |
|
|
|
|
for (guint i = 0; i < g_strv_length((gchar **)needed); i++) { |
|
|
|
|
GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]); |
|
|
|
|
if (!plugin) { |
|
|
|
|
strError += std::string(needed[i]) + " "; |
|
|
|
@ -90,6 +83,246 @@ WebRTCSession::init(std::string *errorMessage) |
|
|
|
|
return initialised_; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
namespace { |
|
|
|
|
|
|
|
|
|
bool isoffering_; |
|
|
|
|
std::string localsdp_; |
|
|
|
|
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_; |
|
|
|
|
|
|
|
|
|
// GStreamer bus watch: reacts to pipeline-wide EOS and error messages by
// tearing the call down. Always returns TRUE so the watch stays installed.
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
        auto *session = static_cast<WebRTCSession *>(user_data);

        const GstMessageType msgType = GST_MESSAGE_TYPE(msg);
        if (msgType == GST_MESSAGE_EOS) {
                nhlog::ui()->error("WebRTC: end of stream");
                session->end();
        } else if (msgType == GST_MESSAGE_ERROR) {
                GError *error = nullptr;
                gchar *debug  = nullptr;
                gst_message_parse_error(msg, &error, &debug);
                nhlog::ui()->error(
                  "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
                g_clear_error(&error);
                g_free(debug);
                session->end();
        }
        return TRUE;
}
|
|
|
|
|
|
|
|
|
// Parse a raw SDP string into a GstWebRTCSessionDescription of the given
// type (offer or answer). Returns nullptr (and logs) when parsing fails.
GstWebRTCSessionDescription *
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                // Ownership of msg transfers to the returned description.
                return gst_webrtc_session_description_new(type, msg);
        } else {
                nhlog::ui()->error("WebRTC: failed to parse remote session description");
                // Bug fix: GstSDPMessage is a plain struct, not a GObject —
                // it must be released with gst_sdp_message_free(), not
                // gst_object_unref() (which expects a GstObject).
                gst_sdp_message_free(msg);
                return nullptr;
        }
}
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
setLocalDescription(GstPromise *promise, gpointer webrtc) |
|
|
|
|
{ |
|
|
|
|
const GstStructure *reply = gst_promise_get_reply(promise); |
|
|
|
|
gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer")); |
|
|
|
|
GstWebRTCSessionDescription *gstsdp = nullptr; |
|
|
|
|
gst_structure_get(reply, |
|
|
|
|
isAnswer ? "answer" : "offer", |
|
|
|
|
GST_TYPE_WEBRTC_SESSION_DESCRIPTION, |
|
|
|
|
&gstsdp, |
|
|
|
|
nullptr); |
|
|
|
|
gst_promise_unref(promise); |
|
|
|
|
g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr); |
|
|
|
|
|
|
|
|
|
gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp); |
|
|
|
|
localsdp_ = std::string(sdp); |
|
|
|
|
g_free(sdp); |
|
|
|
|
gst_webrtc_session_description_free(gstsdp); |
|
|
|
|
|
|
|
|
|
nhlog::ui()->debug( |
|
|
|
|
"WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
createOffer(GstElement *webrtc) |
|
|
|
|
{ |
|
|
|
|
// create-offer first, then set-local-description
|
|
|
|
|
GstPromise *promise = |
|
|
|
|
gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); |
|
|
|
|
g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
createAnswer(GstPromise *promise, gpointer webrtc) |
|
|
|
|
{ |
|
|
|
|
// create-answer first, then set-local-description
|
|
|
|
|
gst_promise_unref(promise); |
|
|
|
|
promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); |
|
|
|
|
g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// Timeout callback fired once local ICE gathering has settled: clears the
// caller's timer id and publishes the cached local SDP plus all gathered
// candidates. Returns FALSE so the GLib timeout does not repeat.
gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *static_cast<guint *>(timerid) = 0;

        auto &session = WebRTCSession::instance();
        if (!isoffering_) {
                emit session.answerCreated(localsdp_, localcandidates_);
                emit session.stateChanged(WebRTCSession::State::ANSWERSENT);
        } else {
                emit session.offerCreated(localsdp_, localcandidates_);
                emit session.stateChanged(WebRTCSession::State::OFFERSENT);
        }
        return FALSE;
}
|
|
|
|
|
|
|
|
|
// webrtcbin "on-ice-candidate" handler: forwards a freshly gathered local ICE
// candidate. Before the offer/answer is sent, candidates are buffered in
// localcandidates_; afterwards they are trickled immediately as signals.
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                     guint mlineIndex,
                     gchar *candidate,
                     gpointer G_GNUC_UNUSED)
{
        nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);

        // Once the local description has gone out, trickle the candidate
        // directly instead of buffering it.
        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
                emit WebRTCSession::instance().newICECandidate(
                  {"audio", (uint16_t)mlineIndex, candidate});
                return;
        }

        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
        // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18. Use a 100ms timeout in
        // the meantime
        // Each new candidate restarts the debounce timer; when no candidate
        // arrives for 100ms, onICEGatheringCompletion fires and sends the SDP.
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);

        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
}
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
iceConnectionStateChanged(GstElement *webrtc, |
|
|
|
|
GParamSpec *pspec G_GNUC_UNUSED, |
|
|
|
|
gpointer user_data G_GNUC_UNUSED) |
|
|
|
|
{ |
|
|
|
|
GstWebRTCICEConnectionState newState; |
|
|
|
|
g_object_get(webrtc, "ice-connection-state", &newState, nullptr); |
|
|
|
|
switch (newState) { |
|
|
|
|
case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: |
|
|
|
|
nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking"); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING); |
|
|
|
|
break; |
|
|
|
|
case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: |
|
|
|
|
nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed"); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED); |
|
|
|
|
break; |
|
|
|
|
default: |
|
|
|
|
break; |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) |
|
|
|
|
{ |
|
|
|
|
GstCaps *caps = gst_pad_get_current_caps(newpad); |
|
|
|
|
if (!caps) |
|
|
|
|
return; |
|
|
|
|
|
|
|
|
|
const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); |
|
|
|
|
gst_caps_unref(caps); |
|
|
|
|
|
|
|
|
|
GstPad *queuepad = nullptr; |
|
|
|
|
if (g_str_has_prefix(name, "audio")) { |
|
|
|
|
nhlog::ui()->debug("WebRTC: received incoming audio stream"); |
|
|
|
|
GstElement *queue = gst_element_factory_make("queue", nullptr); |
|
|
|
|
GstElement *convert = gst_element_factory_make("audioconvert", nullptr); |
|
|
|
|
GstElement *resample = gst_element_factory_make("audioresample", nullptr); |
|
|
|
|
GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); |
|
|
|
|
gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); |
|
|
|
|
gst_element_sync_state_with_parent(queue); |
|
|
|
|
gst_element_sync_state_with_parent(convert); |
|
|
|
|
gst_element_sync_state_with_parent(resample); |
|
|
|
|
gst_element_sync_state_with_parent(sink); |
|
|
|
|
gst_element_link_many(queue, convert, resample, sink, nullptr); |
|
|
|
|
queuepad = gst_element_get_static_pad(queue, "sink"); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
if (queuepad) { |
|
|
|
|
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) |
|
|
|
|
nhlog::ui()->error("WebRTC: unable to link new pad"); |
|
|
|
|
else { |
|
|
|
|
emit WebRTCSession::instance().stateChanged( |
|
|
|
|
WebRTCSession::State::CONNECTED); |
|
|
|
|
} |
|
|
|
|
gst_object_unref(queuepad); |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// webrtcbin "pad-added" handler: for each new source pad (an incoming RTP
// stream), inserts a decodebin into the pipeline and links the pad to it;
// linkNewPad then attaches the playback chain once decoding starts.
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        // Only source pads carry incoming media; ignore everything else.
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        nhlog::ui()->debug("WebRTC: received incoming stream");
        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        // decodebin emits pad-added once it has figured out the stream type.
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: unable to link new pad");
        gst_object_unref(sinkpad);
}
|
|
|
|
|
|
|
|
|
// Case-insensitive substring search for `name` within `sdp`; returns an
// iterator to the first match, or sdp.cend() when absent.
std::string::const_iterator
findName(const std::string &sdp, const std::string &name)
{
        const auto iequal = [](unsigned char lhs, unsigned char rhs) {
                return std::tolower(lhs) == std::tolower(rhs);
        };
        return std::search(sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), iequal);
}
|
|
|
|
|
|
|
|
|
int |
|
|
|
|
getPayloadType(const std::string &sdp, const std::string &name) |
|
|
|
|
{ |
|
|
|
|
// eg a=rtpmap:111 opus/48000/2
|
|
|
|
|
auto e = findName(sdp, name); |
|
|
|
|
if (e == sdp.cend()) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing"); |
|
|
|
|
return -1; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + |
|
|
|
|
" payload type"); |
|
|
|
|
return -1; |
|
|
|
|
} else { |
|
|
|
|
++s; |
|
|
|
|
try { |
|
|
|
|
return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s)); |
|
|
|
|
} catch (...) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + |
|
|
|
|
" payload type"); |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
return -1; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
bool |
|
|
|
|
WebRTCSession::createOffer() |
|
|
|
|
{ |
|
|
|
@ -130,6 +363,38 @@ WebRTCSession::acceptOffer(const std::string &sdp) |
|
|
|
|
return true; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// Applies the callee's SDP answer as webrtcbin's remote description.
// Only valid while in OFFERSENT; returns false (tearing the call down on a
// parse failure) otherwise.
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
        if (state_ != State::OFFERSENT)
                return false;

        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
        if (!answer) {
                // Unparseable answer: abort the whole call.
                end();
                return false;
        }

        // webrtcbin copies the description; we keep ownership and free it.
        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
        gst_webrtc_session_description_free(answer);
        return true;
}
|
|
|
|
|
|
|
|
|
// Feeds remote ICE candidates to webrtcbin. Candidates are ignored unless the
// session has at least been INITIATED (i.e. a remote description exists or is
// being negotiated).
void
WebRTCSession::acceptICECandidates(
  const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
{
        if (state_ >= State::INITIATED) {
                for (const auto &c : candidates) {
                        nhlog::ui()->debug(
                          "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
                        g_signal_emit_by_name(
                          webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
                }
        }
}
|
|
|
|
|
|
|
|
|
bool |
|
|
|
|
WebRTCSession::startPipeline(int opusPayloadType) |
|
|
|
|
{ |
|
|
|
@ -158,14 +423,15 @@ WebRTCSession::startPipeline(int opusPayloadType) |
|
|
|
|
|
|
|
|
|
// generate the offer when the pipeline goes to PLAYING
|
|
|
|
|
if (isoffering_) |
|
|
|
|
g_signal_connect(webrtc_, "on-negotiation-needed", G_CALLBACK(generateOffer), nullptr); |
|
|
|
|
g_signal_connect( |
|
|
|
|
webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr); |
|
|
|
|
|
|
|
|
|
// on-ice-candidate is emitted when a local ICE candidate has been gathered
|
|
|
|
|
g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr); |
|
|
|
|
|
|
|
|
|
// capture ICE failure
|
|
|
|
|
g_signal_connect(webrtc_, "notify::ice-connection-state", |
|
|
|
|
G_CALLBACK(iceConnectionStateChanged), nullptr); |
|
|
|
|
g_signal_connect( |
|
|
|
|
webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr); |
|
|
|
|
|
|
|
|
|
// incoming streams trigger pad-added
|
|
|
|
|
gst_element_set_state(pipe_, GST_STATE_READY); |
|
|
|
@ -195,8 +461,10 @@ bool |
|
|
|
|
WebRTCSession::createPipeline(int opusPayloadType) |
|
|
|
|
{ |
|
|
|
|
std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin " |
|
|
|
|
"autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! " |
|
|
|
|
"queue ! " RTP_CAPS_OPUS + std::to_string(opusPayloadType) + " ! webrtcbin."); |
|
|
|
|
"autoaudiosrc ! volume name=srclevel ! audioconvert ! " |
|
|
|
|
"audioresample ! queue ! opusenc ! rtpopuspay ! " |
|
|
|
|
"queue ! " RTP_CAPS_OPUS + |
|
|
|
|
std::to_string(opusPayloadType) + " ! webrtcbin."); |
|
|
|
|
|
|
|
|
|
webrtc_ = nullptr; |
|
|
|
|
GError *error = nullptr; |
|
|
|
@ -210,35 +478,6 @@ WebRTCSession::createPipeline(int opusPayloadType) |
|
|
|
|
return true; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
bool |
|
|
|
|
WebRTCSession::acceptAnswer(const std::string &sdp) |
|
|
|
|
{ |
|
|
|
|
nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp); |
|
|
|
|
if (state_ != State::OFFERSENT) |
|
|
|
|
return false; |
|
|
|
|
|
|
|
|
|
GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER); |
|
|
|
|
if (!answer) { |
|
|
|
|
end(); |
|
|
|
|
return false; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr); |
|
|
|
|
gst_webrtc_session_description_free(answer); |
|
|
|
|
return true; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates) |
|
|
|
|
{ |
|
|
|
|
if (state_ >= State::INITIATED) { |
|
|
|
|
for (const auto &c : candidates) { |
|
|
|
|
nhlog::ui()->debug("WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate); |
|
|
|
|
g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str()); |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
bool |
|
|
|
|
WebRTCSession::toggleMuteAudioSrc(bool &isMuted) |
|
|
|
|
{ |
|
|
|
@ -270,221 +509,3 @@ WebRTCSession::end() |
|
|
|
|
if (state_ != State::DISCONNECTED) |
|
|
|
|
emit stateChanged(State::DISCONNECTED); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
namespace { |
|
|
|
|
|
|
|
|
|
std::string::const_iterator findName(const std::string &sdp, const std::string &name) |
|
|
|
|
{ |
|
|
|
|
return std::search(sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), |
|
|
|
|
[](unsigned char c1, unsigned char c2) {return std::tolower(c1) == std::tolower(c2);}); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
int getPayloadType(const std::string &sdp, const std::string &name) |
|
|
|
|
{ |
|
|
|
|
// eg a=rtpmap:111 opus/48000/2
|
|
|
|
|
auto e = findName(sdp, name); |
|
|
|
|
if (e == sdp.cend()) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing"); |
|
|
|
|
return -1; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type"); |
|
|
|
|
return -1; |
|
|
|
|
} |
|
|
|
|
else { |
|
|
|
|
++s; |
|
|
|
|
try { |
|
|
|
|
return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s)); |
|
|
|
|
} |
|
|
|
|
catch(...) { |
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name + " payload type"); |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
return -1; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
gboolean |
|
|
|
|
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data) |
|
|
|
|
{ |
|
|
|
|
WebRTCSession *session = (WebRTCSession*)user_data; |
|
|
|
|
switch (GST_MESSAGE_TYPE(msg)) { |
|
|
|
|
case GST_MESSAGE_EOS: |
|
|
|
|
nhlog::ui()->error("WebRTC: end of stream"); |
|
|
|
|
session->end(); |
|
|
|
|
break; |
|
|
|
|
case GST_MESSAGE_ERROR: |
|
|
|
|
GError *error; |
|
|
|
|
gchar *debug; |
|
|
|
|
gst_message_parse_error(msg, &error, &debug); |
|
|
|
|
nhlog::ui()->error("WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message); |
|
|
|
|
g_clear_error(&error); |
|
|
|
|
g_free(debug); |
|
|
|
|
session->end(); |
|
|
|
|
break; |
|
|
|
|
default: |
|
|
|
|
break; |
|
|
|
|
} |
|
|
|
|
return TRUE; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
GstWebRTCSessionDescription* |
|
|
|
|
parseSDP(const std::string &sdp, GstWebRTCSDPType type) |
|
|
|
|
{ |
|
|
|
|
GstSDPMessage *msg; |
|
|
|
|
gst_sdp_message_new(&msg); |
|
|
|
|
if (gst_sdp_message_parse_buffer((guint8*)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) { |
|
|
|
|
return gst_webrtc_session_description_new(type, msg); |
|
|
|
|
} |
|
|
|
|
else { |
|
|
|
|
nhlog::ui()->error("WebRTC: failed to parse remote session description"); |
|
|
|
|
gst_object_unref(msg); |
|
|
|
|
return nullptr; |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
generateOffer(GstElement *webrtc) |
|
|
|
|
{ |
|
|
|
|
// create-offer first, then set-local-description
|
|
|
|
|
GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); |
|
|
|
|
g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
setLocalDescription(GstPromise *promise, gpointer webrtc) |
|
|
|
|
{ |
|
|
|
|
const GstStructure *reply = gst_promise_get_reply(promise); |
|
|
|
|
gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer")); |
|
|
|
|
GstWebRTCSessionDescription *gstsdp = nullptr; |
|
|
|
|
gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr); |
|
|
|
|
gst_promise_unref(promise); |
|
|
|
|
g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr); |
|
|
|
|
|
|
|
|
|
gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp); |
|
|
|
|
localsdp_ = std::string(sdp); |
|
|
|
|
g_free(sdp); |
|
|
|
|
gst_webrtc_session_description_free(gstsdp); |
|
|
|
|
|
|
|
|
|
nhlog::ui()->debug("WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED) |
|
|
|
|
{ |
|
|
|
|
nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate); |
|
|
|
|
|
|
|
|
|
if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) { |
|
|
|
|
emit WebRTCSession::instance().newICECandidate({"audio", (uint16_t)mlineIndex, candidate}); |
|
|
|
|
return; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate}); |
|
|
|
|
|
|
|
|
|
// GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early
|
|
|
|
|
// fixed in v1.18
|
|
|
|
|
// use a 100ms timeout in the meantime
|
|
|
|
|
static guint timerid = 0; |
|
|
|
|
if (timerid) |
|
|
|
|
g_source_remove(timerid); |
|
|
|
|
|
|
|
|
|
timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
gboolean |
|
|
|
|
onICEGatheringCompletion(gpointer timerid) |
|
|
|
|
{ |
|
|
|
|
*(guint*)(timerid) = 0; |
|
|
|
|
if (isoffering_) { |
|
|
|
|
emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT); |
|
|
|
|
} |
|
|
|
|
else { |
|
|
|
|
emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT); |
|
|
|
|
} |
|
|
|
|
return FALSE; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
iceConnectionStateChanged(GstElement *webrtc, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data G_GNUC_UNUSED) |
|
|
|
|
{ |
|
|
|
|
GstWebRTCICEConnectionState newState; |
|
|
|
|
g_object_get(webrtc, "ice-connection-state", &newState, nullptr); |
|
|
|
|
switch (newState) { |
|
|
|
|
case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: |
|
|
|
|
nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking"); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING); |
|
|
|
|
break; |
|
|
|
|
case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: |
|
|
|
|
nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed"); |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED); |
|
|
|
|
break; |
|
|
|
|
default: |
|
|
|
|
break; |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
createAnswer(GstPromise *promise, gpointer webrtc) |
|
|
|
|
{ |
|
|
|
|
// create-answer first, then set-local-description
|
|
|
|
|
gst_promise_unref(promise); |
|
|
|
|
promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr); |
|
|
|
|
g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) |
|
|
|
|
{ |
|
|
|
|
if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC) |
|
|
|
|
return; |
|
|
|
|
|
|
|
|
|
nhlog::ui()->debug("WebRTC: received incoming stream"); |
|
|
|
|
GstElement *decodebin = gst_element_factory_make("decodebin", nullptr); |
|
|
|
|
g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe); |
|
|
|
|
gst_bin_add(GST_BIN(pipe), decodebin); |
|
|
|
|
gst_element_sync_state_with_parent(decodebin); |
|
|
|
|
GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); |
|
|
|
|
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad))) |
|
|
|
|
nhlog::ui()->error("WebRTC: unable to link new pad"); |
|
|
|
|
gst_object_unref(sinkpad); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
void |
|
|
|
|
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe) |
|
|
|
|
{ |
|
|
|
|
GstCaps *caps = gst_pad_get_current_caps(newpad); |
|
|
|
|
if (!caps) |
|
|
|
|
return; |
|
|
|
|
|
|
|
|
|
const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0)); |
|
|
|
|
gst_caps_unref(caps); |
|
|
|
|
|
|
|
|
|
GstPad *queuepad = nullptr; |
|
|
|
|
if (g_str_has_prefix(name, "audio")) { |
|
|
|
|
nhlog::ui()->debug("WebRTC: received incoming audio stream"); |
|
|
|
|
GstElement *queue = gst_element_factory_make("queue", nullptr); |
|
|
|
|
GstElement *convert = gst_element_factory_make("audioconvert", nullptr); |
|
|
|
|
GstElement *resample = gst_element_factory_make("audioresample", nullptr); |
|
|
|
|
GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); |
|
|
|
|
gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); |
|
|
|
|
gst_element_sync_state_with_parent(queue); |
|
|
|
|
gst_element_sync_state_with_parent(convert); |
|
|
|
|
gst_element_sync_state_with_parent(resample); |
|
|
|
|
gst_element_sync_state_with_parent(sink); |
|
|
|
|
gst_element_link_many(queue, convert, resample, sink, nullptr); |
|
|
|
|
queuepad = gst_element_get_static_pad(queue, "sink"); |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
if (queuepad) { |
|
|
|
|
if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) |
|
|
|
|
nhlog::ui()->error("WebRTC: unable to link new pad"); |
|
|
|
|
else { |
|
|
|
|
emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTED); |
|
|
|
|
} |
|
|
|
|
gst_object_unref(queuepad); |
|
|
|
|
} |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
} |
|
|
|
|