/*
 * Copyright (C) 2018 Metrological Group B.V.
 * Author: Thibault Saunier <tsaunier@igalia.com>
 * Author: Alejandro G. Castro <alex@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#include "config.h"

#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "GStreamerMediaStreamSource.h"

#include "AudioTrackPrivate.h"
#include "GStreamerAudioData.h"
#include "GStreamerCommon.h"
#include "GStreamerVideoCaptureSource.h"
#include "MediaSampleGStreamer.h"
#include "VideoTrackPrivate.h"

#include <gst/app/gstappsrc.h>
#include <gst/base/gstflowcombiner.h>
#if GST_CHECK_VERSION(1, 10, 0)

namespace WebCore {

static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate&);
static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType);
static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("video/x-raw;video/x-h264;video/x-vp8"));

static GstStaticPadTemplate audioSrcTemplate = GST_STATIC_PAD_TEMPLATE("audio_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("audio/x-raw(ANY);"));
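
// Builds a GstTagList describing a MediaStreamTrackPrivate: its label as the title, its kind
// (audio/video "main" track) and, for video capture tracks, the capture width and height.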
static GstTagList* mediaStreamTrackPrivateGetTags(MediaStreamTrackPrivate* track)
{
    auto taglist = gst_tag_list_new_empty();

    if (!track->label().isEmpty()) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
            GST_TAG_TITLE, track->label().utf8().data(), nullptr);
    }

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(AudioTrackPrivate::Kind::Main), nullptr);
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(VideoTrackPrivate::Kind::Main), nullptr);

        if (track->isCaptureTrack()) {
            GStreamerVideoCaptureSource& source = static_cast<GStreamerVideoCaptureSource&>(
                track->source());

            gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
                WEBKIT_MEDIA_TRACK_TAG_WIDTH, source.size().width(),
                WEBKIT_MEDIA_TRACK_TAG_HEIGHT, source.size().height(), nullptr);
        }
    }

    return taglist;
}
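
// Creates the GstStream advertised in the GstStreamCollection for a given track, so playbin3
// can select the audio and video streams exposed by this source.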
GstStream* webkitMediaStreamNew(MediaStreamTrackPrivate* track)
{
    GRefPtr<GstCaps> caps;
    GstStreamType type;

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&audioSrcTemplate));
        type = GST_STREAM_TYPE_AUDIO;
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&videoSrcTemplate));
        type = GST_STREAM_TYPE_VIDEO;
    } else {
        GST_FIXME("Handle %d type", static_cast<int>(track->type()));

        return nullptr;
    }

    auto gststream = (GstStream*)gst_stream_new(track->id().utf8().data(),
        caps.get(), type, GST_STREAM_FLAG_SELECT);
    auto tags = adoptGRef(mediaStreamTrackPrivateGetTags(track));
    gst_stream_set_tags(gststream, tags.get());

    return gststream;
}
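
// Observes a single MediaStreamTrackPrivate and forwards its media samples and end-of-track
// notifications to the WebKitMediaStreamSrc element that owns it.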
class WebKitMediaStreamTrackObserver
    : public MediaStreamTrackPrivate::Observer {
public:
    virtual ~WebKitMediaStreamTrackObserver() { };
    WebKitMediaStreamTrackObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }
    void trackStarted(MediaStreamTrackPrivate&) final { };

    void trackEnded(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcTrackEnded(m_mediaStreamSrc, track);
    }

    void trackMutedChanged(MediaStreamTrackPrivate&) final { };
    void trackSettingsChanged(MediaStreamTrackPrivate&) final { };
    void trackEnabledChanged(MediaStreamTrackPrivate&) final { };
    void readyStateChanged(MediaStreamTrackPrivate&) final { };

    void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample& sample) final
    {
        auto gstsample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;

        webkitMediaStreamSrcPushVideoSample(m_mediaStreamSrc, gstsample);
    }

    void audioSamplesAvailable(MediaStreamTrackPrivate&, const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription&, size_t) final
    {
        auto audiodata = static_cast<const GStreamerAudioData&>(audioData);

        webkitMediaStreamSrcPushAudioSample(m_mediaStreamSrc, audiodata.getSample());
    }

private:
    WebKitMediaStreamSrc* m_mediaStreamSrc;
};
class WebKitMediaStreamObserver
    : public MediaStreamPrivate::Observer {
public:
    virtual ~WebKitMediaStreamObserver() { };
    WebKitMediaStreamObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }

    void characteristicsChanged() final { GST_DEBUG_OBJECT(m_mediaStreamSrc.get(), "renegotiation should happen"); }
    void activeStatusChanged() final { }

    void didAddTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcAddTrack(m_mediaStreamSrc.get(), &track, false);
    }

    void didRemoveTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcRemoveTrackByType(m_mediaStreamSrc.get(), track.type());
    }

private:
    GRefPtr<WebKitMediaStreamSrc> m_mediaStreamSrc;
};
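
// Instance and class structures for the webkitmediastreamsrc GstBin subclass.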
typedef struct _WebKitMediaStreamSrcClass WebKitMediaStreamSrcClass;
struct _WebKitMediaStreamSrc {
    GstBin parent_instance;

    gchar* uri;

    GstElement* audioSrc;
    GstClockTime firstAudioBufferPts;
    GstElement* videoSrc;
    GstClockTime firstFramePts;

    std::unique_ptr<WebKitMediaStreamTrackObserver> mediaStreamTrackObserver;
    std::unique_ptr<WebKitMediaStreamObserver> mediaStreamObserver;
    gint npads;
    RefPtr<MediaStreamPrivate> stream;
    RefPtr<MediaStreamTrackPrivate> track;

    GstFlowCombiner* flowCombiner;
    GRefPtr<GstStreamCollection> streamCollection;
};

struct _WebKitMediaStreamSrcClass {
    GstBinClass parent_class;
};

enum {
    PROP_0,
    PROP_IS_LIVE
};
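
// GstURIHandler interface boilerplate: the element advertises the "mediastream" protocol and
// remembers the URI it was given.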
static GstURIType webkit_media_stream_src_uri_get_type(GType)
{
    return GST_URI_SRC;
}

static const gchar* const* webkit_media_stream_src_uri_get_protocols(GType)
{
    static const gchar* protocols[] = { "mediastream", nullptr };

    return protocols;
}

static gchar* webkit_media_stream_src_uri_get_uri(GstURIHandler* handler)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);

    /* FIXME: make thread-safe */
    return g_strdup(self->uri);
}
static gboolean webkitMediaStreamSrcUriSetUri(GstURIHandler* handler, const gchar* uri,
    GError**)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);
    self->uri = g_strdup(uri);

    return TRUE;
}

static void webkitMediaStreamSrcUriHandlerInit(gpointer g_iface, gpointer)
{
    GstURIHandlerInterface* iface = (GstURIHandlerInterface*)g_iface;

    iface->get_type = webkit_media_stream_src_uri_get_type;
    iface->get_protocols = webkit_media_stream_src_uri_get_protocols;
    iface->get_uri = webkit_media_stream_src_uri_get_uri;
    iface->set_uri = webkitMediaStreamSrcUriSetUri;
}
GST_DEBUG_CATEGORY_STATIC(webkitMediaStreamSrcDebug);
#define GST_CAT_DEFAULT webkitMediaStreamSrcDebug

#define doInit \
    G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webkitMediaStreamSrcUriHandlerInit); \
    GST_DEBUG_CATEGORY_INIT(webkitMediaStreamSrcDebug, "webkitwebmediastreamsrc", 0, "mediastreamsrc element"); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_WIDTH, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream width", "Webkit MediaStream width", gst_tag_merge_use_first); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_HEIGHT, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream height", "Webkit MediaStream height", gst_tag_merge_use_first); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_KIND, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream Kind", "Webkit MediaStream Kind", gst_tag_merge_use_first);

G_DEFINE_TYPE_WITH_CODE(WebKitMediaStreamSrc, webkit_media_stream_src, GST_TYPE_BIN, doInit);
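
// The only GObject property is the read-only "is-live" flag, always TRUE, so playbin3 treats
// this element as a live source.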
static void webkitMediaStreamSrcSetProperty(GObject* object, guint prop_id,
    const GValue*, GParamSpec* pspec)
{
    switch (prop_id) {
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}

static void webkitMediaStreamSrcGetProperty(GObject* object, guint prop_id, GValue* value,
    GParamSpec* pspec)
{
    switch (prop_id) {
    case PROP_IS_LIVE:
        g_value_set_boolean(value, TRUE);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}
static void webkitMediaStreamSrcDispose(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    if (self->audioSrc) {
        gst_bin_remove(GST_BIN(self), self->audioSrc);
        self->audioSrc = nullptr;
    }

    if (self->videoSrc) {
        gst_bin_remove(GST_BIN(self), self->videoSrc);
        self->videoSrc = nullptr;
    }

    // Chain up so GstBin can dispose of its remaining children.
    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->dispose(object);
}
static void webkitMediaStreamSrcFinalize(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    GST_OBJECT_LOCK(self);
    if (self->stream) {
        for (auto& track : self->stream->tracks())
            track->removeObserver(*self->mediaStreamTrackObserver.get());

        self->stream->removeObserver(*self->mediaStreamObserver);
        self->stream = nullptr;
    }
    GST_OBJECT_UNLOCK(self);

    g_clear_pointer(&self->uri, g_free);
    gst_flow_combiner_free(self->flowCombiner);

    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->finalize(object);
}
static GstStateChangeReturn webkitMediaStreamSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn result;
    auto* self = WEBKIT_MEDIA_STREAM_SRC(element);

    if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
        GST_OBJECT_LOCK(self);
        if (self->stream) {
            for (auto& track : self->stream->tracks())
                track->removeObserver(*self->mediaStreamTrackObserver.get());
        } else if (self->track)
            self->track->removeObserver(*self->mediaStreamTrackObserver.get());
        GST_OBJECT_UNLOCK(self);
    }

    result = GST_ELEMENT_CLASS(webkit_media_stream_src_parent_class)->change_state(element, transition);

    if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
        result = GST_STATE_CHANGE_NO_PREROLL;

    return result;
}
static void webkit_media_stream_src_class_init(WebKitMediaStreamSrcClass* klass)
{
    GObjectClass* gobject_class = G_OBJECT_CLASS(klass);
    GstElementClass* gstelement_klass = GST_ELEMENT_CLASS(klass);

    gobject_class->finalize = webkitMediaStreamSrcFinalize;
    gobject_class->dispose = webkitMediaStreamSrcDispose;
    gobject_class->get_property = webkitMediaStreamSrcGetProperty;
    gobject_class->set_property = webkitMediaStreamSrcSetProperty;

    g_object_class_install_property(gobject_class, PROP_IS_LIVE,
        g_param_spec_boolean("is-live", "Is Live",
            "Let playbin3 know we are a live source.",
            TRUE, (GParamFlags)(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

    gstelement_klass->change_state = webkitMediaStreamSrcChangeState;
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&videoSrcTemplate));
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&audioSrcTemplate));
}
static void webkit_media_stream_src_init(WebKitMediaStreamSrc* self)
{
    self->mediaStreamTrackObserver = std::make_unique<WebKitMediaStreamTrackObserver>(self);
    self->mediaStreamObserver = std::make_unique<WebKitMediaStreamObserver>(self);
    self->flowCombiner = gst_flow_combiner_new();
    self->firstAudioBufferPts = GST_CLOCK_TIME_NONE;
    self->firstFramePts = GST_CLOCK_TIME_NONE;
}
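
// Context handed to the stream-start pad probe below: the owning element, the WebCore track
// and the static pad template to use when exposing the corresponding ghost pad.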
struct ProbeData {
    WebKitMediaStreamSrc* self;
    RefPtr<MediaStreamTrackPrivate> track;
    GstStaticPadTemplate* pad_template;
};
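
// Chain function installed on the internal proxy pads: flow returns from all source pads are
// aggregated through the GstFlowCombiner so a single flushing pad does not shut down the
// whole element.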
static GstFlowReturn webkitMediaStreamSrcChain(GstPad* pad, GstObject* parent, GstBuffer* buffer)
{
    GstFlowReturn result, chain_result;
    GRefPtr<WebKitMediaStreamSrc> self = adoptGRef(WEBKIT_MEDIA_STREAM_SRC(gst_object_get_parent(parent)));

    chain_result = gst_proxy_pad_chain_default(pad, GST_OBJECT(self.get()), buffer);
    result = gst_flow_combiner_update_pad_flow(self.get()->flowCombiner, pad, chain_result);

    if (result == GST_FLOW_FLUSHING)
        return chain_result;

    return result;
}
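
// Ghosts the given appsrc source pad on the bin, activates it and registers its proxy pad with
// the flow combiner.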
static void webkitMediaStreamSrcAddPad(WebKitMediaStreamSrc* self, GstPad* target, GstStaticPadTemplate* pad_template)
{
    auto padname = String::format("src_%u", g_atomic_int_add(&(self->npads), 1));
    auto ghostpad = gst_ghost_pad_new_from_template(padname.utf8().data(), target,
        gst_static_pad_template_get(pad_template));

    GST_DEBUG_OBJECT(self, "%s Ghosting %" GST_PTR_FORMAT,
        gst_object_get_path_string(GST_OBJECT_CAST(self)),
        target);

    auto proxypad = adoptGRef(GST_PAD(gst_proxy_pad_get_internal(GST_PROXY_PAD(ghostpad))));
    gst_pad_set_active(ghostpad, TRUE);
    if (!gst_element_add_pad(GST_ELEMENT(self), GST_PAD(ghostpad))) {
        GST_ERROR_OBJECT(self, "Could not add pad %s:%s", GST_DEBUG_PAD_NAME(ghostpad));
        ASSERT_NOT_REACHED();

        return;
    }

    gst_flow_combiner_add_pad(self->flowCombiner, proxypad.get());
    gst_pad_set_chain_function(proxypad.get(),
        static_cast<GstPadChainFunction>(webkitMediaStreamSrcChain));
}
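
// Downstream event probe: rewrites the stream-start event so the stream id matches the WebCore
// track id, pushes the track tags, and only then exposes the ghost pad.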
static GstPadProbeReturn webkitMediaStreamSrcPadProbeCb(GstPad* pad, GstPadProbeInfo* info, ProbeData* data)
{
    GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
    WebKitMediaStreamSrc* self = data->self;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_STREAM_START: {
        const gchar* stream_id;
        GRefPtr<GstStream> stream = nullptr;

        gst_event_parse_stream_start(event, &stream_id);
        if (!g_strcmp0(stream_id, data->track->id().utf8().data())) {
            GST_INFO_OBJECT(pad, "Stream-start event for this track is already sticky");
            return GST_PAD_PROBE_OK;
        }

        auto stream_start = gst_event_new_stream_start(data->track->id().utf8().data());
        gst_event_set_group_id(stream_start, 1);
        gst_event_unref(event);

        gst_pad_push_event(pad, stream_start);
        gst_pad_push_event(pad, gst_event_new_tag(mediaStreamTrackPrivateGetTags(data->track.get())));

        webkitMediaStreamSrcAddPad(self, pad, data->pad_template);

        return GST_PAD_PROBE_HANDLED;
    }
    default:
        break;
    }

    return GST_PAD_PROBE_OK;
}
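
// Adds a source element for a track to the bin. When the track is the only one (onlyTrack) the
// pad is ghosted immediately; otherwise a pad probe waits for the stream-start event first.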
static gboolean webkitMediaStreamSrcSetupSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement* element,
    GstStaticPadTemplate* pad_template, gboolean observe_track,
    bool onlyTrack)
{
    auto pad = adoptGRef(gst_element_get_static_pad(element, "src"));

    gst_bin_add(GST_BIN(self), element);

    if (!onlyTrack) {
        ProbeData* data = new ProbeData;
        data->self = WEBKIT_MEDIA_STREAM_SRC(self);
        data->pad_template = pad_template;
        data->track = track;

        gst_pad_add_probe(pad.get(), (GstPadProbeType)GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
            (GstPadProbeCallback)webkitMediaStreamSrcPadProbeCb, data,
            [](gpointer data) {
                delete (ProbeData*)data;
            });
    } else
        webkitMediaStreamSrcAddPad(self, pad.get(), pad_template);

    if (observe_track)
        track->addObserver(*self->mediaStreamTrackObserver.get());

    gst_element_sync_state_with_parent(element);
    return TRUE;
}
static gboolean webkitMediaStreamSrcSetupAppSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement** element,
    GstStaticPadTemplate* pad_template, bool onlyTrack)
{
    *element = gst_element_factory_make("appsrc", nullptr);
    g_object_set(*element, "is-live", true, "format", GST_FORMAT_TIME, nullptr);

    return webkitMediaStreamSrcSetupSrc(self, track, *element, pad_template, TRUE, onlyTrack);
}
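
// Publishes a GstStreamCollection message describing the tracks of the stream, as expected by
// playbin3.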
static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    GST_OBJECT_LOCK(self);
    self->streamCollection = adoptGRef(gst_stream_collection_new(stream->id().utf8().data()));
    for (auto& track : stream->tracks()) {
        auto gststream = webkitMediaStreamNew(track.get());

        gst_stream_collection_add_stream(self->streamCollection.get(), gststream);
    }
    GST_OBJECT_UNLOCK(self);

    gst_element_post_message(GST_ELEMENT(self),
        gst_message_new_stream_collection(GST_OBJECT(self), self->streamCollection.get()));
}
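
// Creates an appsrc for the given track and wires it into the bin.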
bool webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, bool onlyTrack)
{
    bool res = false;
    if (track->type() == RealtimeMediaSource::Type::Audio)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->audioSrc, &audioSrcTemplate, onlyTrack);
    else if (track->type() == RealtimeMediaSource::Type::Video)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->videoSrc, &videoSrcTemplate, onlyTrack);
    else
        GST_INFO("Unsupported track type: %d", static_cast<int>(track->type()));

    if (onlyTrack && res)
        self->track = track;

    return res;
}
static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType)
{
    if (trackType == RealtimeMediaSource::Type::Audio) {
        if (self->audioSrc) {
            gst_element_set_state(self->audioSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->audioSrc);
            self->audioSrc = nullptr;
        }
    } else if (trackType == RealtimeMediaSource::Type::Video) {
        if (self->videoSrc) {
            gst_element_set_state(self->videoSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->videoSrc);
            self->videoSrc = nullptr;
        }
    } else
        GST_INFO("Unsupported track type: %d", static_cast<int>(trackType));
}
bool webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    ASSERT(WEBKIT_IS_MEDIA_STREAM_SRC(self));

    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Audio);
    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Video);

    webkitMediaStreamSrcPostStreamCollection(self, stream);

    self->stream = stream;
    self->stream->addObserver(*self->mediaStreamObserver.get());
    for (auto& track : stream->tracks())
        webkitMediaStreamSrcAddTrack(self, track.get(), false);

    return true;
}
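
// Sample pushing: the PTS of the first buffer is recorded and subtracted through a pad offset
// so that the exposed streams start at running time 0.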
static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->videoSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstFramePts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstFramePts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->videoSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstFramePts);
        }

        gst_app_src_push_sample(GST_APP_SRC(self->videoSrc), gstsample);
    }
}
static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->audioSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstAudioBufferPts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstAudioBufferPts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->audioSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstAudioBufferPts);
        }

        gst_app_src_push_sample(GST_APP_SRC(self->audioSrc), gstsample);
    }
}
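
// When a track ends, find the source pad whose stream id matches the track, refresh the stream
// collection and tags, and push EOS on that pad.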
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate& track)
{
    GRefPtr<GstPad> pad = nullptr;

    GST_OBJECT_LOCK(self);
    for (auto tmp = GST_ELEMENT(self)->srcpads; tmp; tmp = tmp->next) {
        GstPad* tmppad = GST_PAD(tmp->data);
        const gchar* stream_id;

        GstEvent* stream_start = gst_pad_get_sticky_event(tmppad, GST_EVENT_STREAM_START, 0);
        if (!stream_start)
            continue;

        gst_event_parse_stream_start(stream_start, &stream_id);
        if (String(stream_id) == track.id()) {
            pad = tmppad;
            break;
        }
    }
    GST_OBJECT_UNLOCK(self);

    if (!pad) {
        GST_ERROR_OBJECT(self, "No pad found for %s", track.id().utf8().data());

        return;
    }

    // Make sure that the video.videoWidth is reset to 0
    webkitMediaStreamSrcPostStreamCollection(self, self->stream.get());
    auto tags = mediaStreamTrackPrivateGetTags(&track);
    gst_pad_push_event(pad.get(), gst_event_new_tag(tags));
    gst_pad_push_event(pad.get(), gst_event_new_eos());
}
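
// Minimal usage sketch (assuming a WebCore::MediaStreamPrivate* named streamPrivate and an
// existing playback pipeline; these names are illustrative, not from this file):
//
//     GstElement* src = webkitMediaStreamSrcNew();
//     webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(src), streamPrivate);
//     gst_bin_add(GST_BIN(pipeline), src);
//     gst_element_sync_state_with_parent(src);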
GstElement* webkitMediaStreamSrcNew(void)
{
    return GST_ELEMENT(g_object_new(webkit_media_stream_src_get_type(), nullptr));
}

} // namespace WebCore

#endif // GST_CHECK_VERSION(1, 10, 0)
#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)