Skip to content

Instantly share code, notes, and snippets.

@CoreyCole
Created June 10, 2020 18:45
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save CoreyCole/fc46c4b8c5cae324cba19e16a960dd0c to your computer and use it in GitHub Desktop.
/* Build and start the send/receive WebRTC pipeline.
 *
 * Constructs a webrtcbin-based pipeline with a live VP8 video test source
 * and a live Opus audio test source, connects the negotiation and ICE
 * signal handlers, then sets the pipeline to PLAYING.
 *
 * Side effects: assigns the file-scope globals pipe1 (owning ref) and
 * webrtc1 (borrowed; its lifetime is tied to the pipeline).
 *
 * Returns: TRUE on success; FALSE on failure, in which case pipe1 is
 * released and both globals are reset to NULL.
 */
static gboolean
start_pipeline (void)
{
  GstStateChangeReturn ret;
  GError *error = NULL;

  pipe1 =
      gst_parse_launch ("webrtcbin bundle-policy=max-bundle name=sendrecv "
      STUN_SERVER
      TURN_SERVER
      "videotestsrc is-live=true pattern=ball ! videoconvert ! queue ! vp8enc deadline=1 ! rtpvp8pay ! "
      "queue ! " RTP_CAPS_VP8 "96 ! sendrecv. "
      "audiotestsrc is-live=true wave=ticks ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! "
      "queue ! " RTP_CAPS_OPUS "97 ! sendrecv. ", &error);
  if (error) {
    /* gst_parse_launch() may return a partial pipeline even on error;
     * the err path below releases it either way. */
    g_printerr ("Failed to parse launch: %s\n", error->message);
    g_error_free (error);
    goto err;
  }

  webrtc1 = gst_bin_get_by_name (GST_BIN (pipe1), "sendrecv");
  g_assert_nonnull (webrtc1);

  /* This is the gstwebrtc entry point where we create the offer and so on.
   * It will be called when the pipeline goes to PLAYING. */
  g_signal_connect (webrtc1, "on-negotiation-needed",
      G_CALLBACK (on_negotiation_needed), NULL);
  /* We need to transmit this ICE candidate to the browser via the websockets
   * signalling server. Incoming ice candidates from the browser need to be
   * added by us too, see on_server_message() */
  g_signal_connect (webrtc1, "on-ice-candidate",
      G_CALLBACK (send_ice_candidate_message), NULL);
  g_signal_connect (webrtc1, "notify::ice-gathering-state",
      G_CALLBACK (on_ice_gathering_state_notify), NULL);

  /* Unlike the original, check the READY transition too: a failure here
   * (e.g. a missing plugin feature) should abort instead of continuing. */
  ret = gst_element_set_state (pipe1, GST_STATE_READY);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto err;

  /* Incoming streams will be exposed via this signal */
  g_signal_connect (webrtc1, "pad-added", G_CALLBACK (on_incoming_stream),
      pipe1);

  /* gst_bin_get_by_name() returned an extra ref; drop it here. The element's
   * lifetime is the same as the pipeline itself, which keeps it alive. */
  gst_object_unref (webrtc1);

  g_print ("Starting pipeline\n");
  ret = gst_element_set_state (GST_ELEMENT (pipe1), GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto err;

  return TRUE;

err:
  /* g_clear_object() is NULL-safe: it unrefs (if non-NULL) and resets the
   * pointer, so no "if (pipe1)" guard is needed. */
  g_clear_object (&pipe1);
  /* webrtc1 was either never assigned or already unreffed above; just make
   * sure no stale pointer survives. */
  webrtc1 = NULL;
  return FALSE;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment