Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp
/*
 * Copyright (C) 2018 Metrological Group B.V.
 * Author: Thibault Saunier <tsaunier@igalia.com>
 * Author: Alejandro G. Castro <alex@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"

#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "GStreamerMediaStreamSource.h"

#include "AudioTrackPrivate.h"
#include "GStreamerAudioData.h"
#include "GStreamerCommon.h"
#include "GStreamerVideoCaptureSource.h"
#include "MediaSampleGStreamer.h"
#include "VideoTrackPrivate.h"

#include <gst/app/gstappsrc.h>
#include <gst/base/gstflowcombiner.h>

namespace WebCore {

static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample);
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate&);
static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType);

static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("video/x-raw;video/x-h264;video/x-vp8"));

static GstStaticPadTemplate audioSrcTemplate = GST_STATIC_PAD_TEMPLATE("audio_src",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS("audio/x-raw(ANY);"));

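// Builds a GstTagList describing a track: the track label becomes GST_TAG_TITLE, the track
// kind is stored under WEBKIT_MEDIA_TRACK_TAG_KIND, and capture video tracks also advertise
// their width and height through the custom tags registered in doInit below.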
static GstTagList* mediaStreamTrackPrivateGetTags(MediaStreamTrackPrivate* track)
{
    auto taglist = gst_tag_list_new_empty();

    if (!track->label().isEmpty()) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
            GST_TAG_TITLE, track->label().utf8().data(), nullptr);
    }

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(AudioTrackPrivate::Kind::Main), nullptr);
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND, WEBKIT_MEDIA_TRACK_TAG_KIND,
            static_cast<int>(VideoTrackPrivate::Kind::Main), nullptr);

        if (track->isCaptureTrack()) {
            GStreamerVideoCaptureSource& source = static_cast<GStreamerVideoCaptureSource&>(
                track->source());

            gst_tag_list_add(taglist, GST_TAG_MERGE_APPEND,
                WEBKIT_MEDIA_TRACK_TAG_WIDTH, source.size().width(),
                WEBKIT_MEDIA_TRACK_TAG_HEIGHT, source.size().height(), nullptr);
        }
    }

    return taglist;
}

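// Creates a GstStream for the given track so it can be exposed in the GstStreamCollection
// posted by the element. Only audio and video tracks are supported.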
GstStream* webkitMediaStreamNew(MediaStreamTrackPrivate* track)
{
    GRefPtr<GstCaps> caps;
    GstStreamType type;

    if (track->type() == RealtimeMediaSource::Type::Audio) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&audioSrcTemplate));
        type = GST_STREAM_TYPE_AUDIO;
    } else if (track->type() == RealtimeMediaSource::Type::Video) {
        caps = adoptGRef(gst_static_pad_template_get_caps(&videoSrcTemplate));
        type = GST_STREAM_TYPE_VIDEO;
    } else {
        GST_FIXME("Handle %d type", static_cast<int>(track->type()));

        return nullptr;
    }

    auto gststream = (GstStream*)gst_stream_new(track->id().utf8().data(),
        caps.get(), type, GST_STREAM_FLAG_SELECT);
    auto tags = adoptGRef(mediaStreamTrackPrivateGetTags(track));
    gst_stream_set_tags(gststream, tags.get());

    return gststream;
}

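// Per-track observer: forwards incoming video and audio samples to the element's appsrc
// elements and notifies the element when the track ends.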
class WebKitMediaStreamTrackObserver
    : public MediaStreamTrackPrivate::Observer {
public:
    virtual ~WebKitMediaStreamTrackObserver() { }
    WebKitMediaStreamTrackObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }
    void trackStarted(MediaStreamTrackPrivate&) final { }

    void trackEnded(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcTrackEnded(m_mediaStreamSrc, track);
    }

    void trackMutedChanged(MediaStreamTrackPrivate&) final { }
    void trackSettingsChanged(MediaStreamTrackPrivate&) final { }
    void trackEnabledChanged(MediaStreamTrackPrivate&) final { }
    void readyStateChanged(MediaStreamTrackPrivate&) final { }

    void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample& sample) final
    {
        auto gstsample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;

        webkitMediaStreamSrcPushVideoSample(m_mediaStreamSrc, gstsample);
    }

    void audioSamplesAvailable(MediaStreamTrackPrivate&, const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription&, size_t) final
    {
        auto audiodata = static_cast<const GStreamerAudioData&>(audioData);

        webkitMediaStreamSrcPushAudioSample(m_mediaStreamSrc, audiodata.getSample());
    }

private:
    WebKitMediaStreamSrc* m_mediaStreamSrc;
};

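// Stream-level observer: keeps the element in sync with tracks being added to or removed
// from the MediaStreamPrivate it wraps.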
class WebKitMediaStreamObserver
    : public MediaStreamPrivate::Observer {
public:
    virtual ~WebKitMediaStreamObserver() { }
    WebKitMediaStreamObserver(WebKitMediaStreamSrc* src)
        : m_mediaStreamSrc(src) { }

    void characteristicsChanged() final { GST_DEBUG_OBJECT(m_mediaStreamSrc.get(), "renegotiation should happen"); }
    void activeStatusChanged() final { }

    void didAddTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcAddTrack(m_mediaStreamSrc.get(), &track, false);
    }

    void didRemoveTrack(MediaStreamTrackPrivate& track) final
    {
        webkitMediaStreamSrcRemoveTrackByType(m_mediaStreamSrc.get(), track.type());
    }

private:
    GRefPtr<WebKitMediaStreamSrc> m_mediaStreamSrc;
};

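// Instance structure of the element: it owns one appsrc per media type, the observers
// registered on the stream and its tracks, and a GstFlowCombiner aggregating the flow
// returns of the exposed source pads.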
typedef struct _WebKitMediaStreamSrcClass WebKitMediaStreamSrcClass;
struct _WebKitMediaStreamSrc {
    GstBin parent_instance;

    gchar* uri;

    GstElement* audioSrc;
    GstClockTime firstAudioBufferPts;
    GstElement* videoSrc;
    GstClockTime firstFramePts;

    std::unique_ptr<WebKitMediaStreamTrackObserver> mediaStreamTrackObserver;
    std::unique_ptr<WebKitMediaStreamObserver> mediaStreamObserver;
    volatile gint npads;
    RefPtr<MediaStreamPrivate> stream;
    RefPtr<MediaStreamTrackPrivate> track;

    GstFlowCombiner* flowCombiner;
    GRefPtr<GstStreamCollection> streamCollection;
};

struct _WebKitMediaStreamSrcClass {
    GstBinClass parent_class;
};

enum {
    PROP_0,
    PROP_IS_LIVE,
    PROP_LAST
};

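// GstURIHandler implementation: registering the "mediastream" protocol lets the element be
// constructed from a mediastream:// URI (e.g. by playbin).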
static GstURIType webkit_media_stream_src_uri_get_type(GType)
{
    return GST_URI_SRC;
}

static const gchar* const* webkit_media_stream_src_uri_get_protocols(GType)
{
    static const gchar* protocols[] = { "mediastream", nullptr };

    return protocols;
}

static gchar* webkit_media_stream_src_uri_get_uri(GstURIHandler* handler)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);

    /* FIXME: make thread-safe */
    return g_strdup(self->uri);
}

static gboolean webkitMediaStreamSrcUriSetUri(GstURIHandler* handler, const gchar* uri,
    GError**)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);

    // Free any previously set URI before storing the new one.
    g_free(self->uri);
    self->uri = g_strdup(uri);

    return TRUE;
}

static void webkitMediaStreamSrcUriHandlerInit(gpointer g_iface, gpointer)
{
    GstURIHandlerInterface* iface = (GstURIHandlerInterface*)g_iface;

    iface->get_type = webkit_media_stream_src_uri_get_type;
    iface->get_protocols = webkit_media_stream_src_uri_get_protocols;
    iface->get_uri = webkit_media_stream_src_uri_get_uri;
    iface->set_uri = webkitMediaStreamSrcUriSetUri;
}

GST_DEBUG_CATEGORY_STATIC(webkitMediaStreamSrcDebug);
#define GST_CAT_DEFAULT webkitMediaStreamSrcDebug

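// Type boilerplate: the doInit block wires up the URI handler interface, creates the debug
// category and registers the custom WebKit tags used to carry track kind and capture size.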
#define doInit                                                                                                                                                                \
    G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, webkitMediaStreamSrcUriHandlerInit);                                                                                          \
    GST_DEBUG_CATEGORY_INIT(webkitMediaStreamSrcDebug, "webkitwebmediastreamsrc", 0, "mediastreamsrc element");                                                               \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_WIDTH, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream width", "Webkit MediaStream width", gst_tag_merge_use_first);    \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_HEIGHT, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream height", "Webkit MediaStream height", gst_tag_merge_use_first); \
    gst_tag_register_static(WEBKIT_MEDIA_TRACK_TAG_KIND, GST_TAG_FLAG_META, G_TYPE_INT, "Webkit MediaStream Kind", "Webkit MediaStream Kind", gst_tag_merge_use_first);

G_DEFINE_TYPE_WITH_CODE(WebKitMediaStreamSrc, webkit_media_stream_src, GST_TYPE_BIN, doInit);

static void webkitMediaStreamSrcSetProperty(GObject* object, guint prop_id,
    const GValue*, GParamSpec* pspec)
{
    switch (prop_id) {
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}

static void webkitMediaStreamSrcGetProperty(GObject* object, guint prop_id, GValue* value,
    GParamSpec* pspec)
{
    switch (prop_id) {
    case PROP_IS_LIVE:
        g_value_set_boolean(value, TRUE);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
        break;
    }
}

static void webkitMediaStreamSrcDispose(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    if (self->audioSrc) {
        gst_bin_remove(GST_BIN(self), self->audioSrc);
        self->audioSrc = nullptr;
    }

    if (self->videoSrc) {
        gst_bin_remove(GST_BIN(self), self->videoSrc);
        self->videoSrc = nullptr;
    }

    // Chain up so that GstBin can dispose of its own resources.
    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->dispose(object);
}

static void webkitMediaStreamSrcFinalize(GObject* object)
{
    WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);

    GST_OBJECT_LOCK(self);
    if (self->stream) {
        for (auto& track : self->stream->tracks())
            track->removeObserver(*self->mediaStreamTrackObserver.get());

        self->stream->removeObserver(*self->mediaStreamObserver);
        self->stream = nullptr;
    }
    GST_OBJECT_UNLOCK(self);

    g_clear_pointer(&self->uri, g_free);
    gst_flow_combiner_free(self->flowCombiner);

    // Chain up so that the GstBin and GObject finalizers run as well.
    G_OBJECT_CLASS(webkit_media_stream_src_parent_class)->finalize(object);
}

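// On PAUSED->READY the track observers are detached so no more samples are pushed while the
// element shuts down. READY->PAUSED returns NO_PREROLL because this is a live source.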
static GstStateChangeReturn webkitMediaStreamSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn result;
    auto* self = WEBKIT_MEDIA_STREAM_SRC(element);

    if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
        GST_OBJECT_LOCK(self);
        if (self->stream) {
            for (auto& track : self->stream->tracks())
                track->removeObserver(*self->mediaStreamTrackObserver.get());
        } else if (self->track)
            self->track->removeObserver(*self->mediaStreamTrackObserver.get());
        GST_OBJECT_UNLOCK(self);
    }

    result = GST_ELEMENT_CLASS(webkit_media_stream_src_parent_class)->change_state(element, transition);

    if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
        result = GST_STATE_CHANGE_NO_PREROLL;

    return result;
}

static void webkit_media_stream_src_class_init(WebKitMediaStreamSrcClass* klass)
{
    GObjectClass* gobject_class = G_OBJECT_CLASS(klass);
    GstElementClass* gstelement_klass = GST_ELEMENT_CLASS(klass);

    gobject_class->finalize = webkitMediaStreamSrcFinalize;
    gobject_class->dispose = webkitMediaStreamSrcDispose;
    gobject_class->get_property = webkitMediaStreamSrcGetProperty;
    gobject_class->set_property = webkitMediaStreamSrcSetProperty;

    g_object_class_install_property(gobject_class, PROP_IS_LIVE,
        g_param_spec_boolean("is-live", "Is Live",
            "Let playbin3 know we are a live source.",
            TRUE, (GParamFlags)(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

    gstelement_klass->change_state = webkitMediaStreamSrcChangeState;
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&videoSrcTemplate));
    gst_element_class_add_pad_template(gstelement_klass,
        gst_static_pad_template_get(&audioSrcTemplate));
}

static void webkit_media_stream_src_init(WebKitMediaStreamSrc* self)
{
    self->mediaStreamTrackObserver = std::make_unique<WebKitMediaStreamTrackObserver>(self);
    self->mediaStreamObserver = std::make_unique<WebKitMediaStreamObserver>(self);
    self->flowCombiner = gst_flow_combiner_new();
    self->firstAudioBufferPts = GST_CLOCK_TIME_NONE;
    self->firstFramePts = GST_CLOCK_TIME_NONE;
}

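// Context passed to the pad probe installed by webkitMediaStreamSrcSetupSrc().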
typedef struct {
    WebKitMediaStreamSrc* self;
    RefPtr<MediaStreamTrackPrivate> track;
    GstStaticPadTemplate* pad_template;
} ProbeData;

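// Chain function installed on the internal proxy pads: per-pad flow returns are aggregated
// through the GstFlowCombiner before being reported back to the appsrc.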
static GstFlowReturn webkitMediaStreamSrcChain(GstPad* pad, GstObject* parent, GstBuffer* buffer)
{
    GstFlowReturn result, chain_result;
    GRefPtr<WebKitMediaStreamSrc> self = adoptGRef(WEBKIT_MEDIA_STREAM_SRC(gst_object_get_parent(parent)));

    chain_result = gst_proxy_pad_chain_default(pad, GST_OBJECT(self.get()), buffer);
    result = gst_flow_combiner_update_pad_flow(self.get()->flowCombiner, pad, chain_result);

    if (result == GST_FLOW_FLUSHING)
        return chain_result;

    return result;
}

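// Exposes the target pad through a new ghost pad on the element, activates it and registers
// the internal proxy pad with the flow combiner and the chain function above.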
static void webkitMediaStreamSrcAddPad(WebKitMediaStreamSrc* self, GstPad* target, GstStaticPadTemplate* pad_template)
{
    auto padname = makeString("src_", g_atomic_int_add(&(self->npads), 1));
    auto ghostpad = gst_ghost_pad_new_from_template(padname.utf8().data(), target,
        gst_static_pad_template_get(pad_template));

    GST_DEBUG_OBJECT(self, "%s Ghosting %" GST_PTR_FORMAT,
        gst_object_get_path_string(GST_OBJECT_CAST(self)),
        target);

    auto proxypad = adoptGRef(GST_PAD(gst_proxy_pad_get_internal(GST_PROXY_PAD(ghostpad))));
    gst_pad_set_active(ghostpad, TRUE);
    if (!gst_element_add_pad(GST_ELEMENT(self), GST_PAD(ghostpad))) {
        GST_ERROR_OBJECT(self, "Could not add pad %s:%s", GST_DEBUG_PAD_NAME(ghostpad));
        ASSERT_NOT_REACHED();

        return;
    }

    gst_flow_combiner_add_pad(self->flowCombiner, proxypad.get());
    gst_pad_set_chain_function(proxypad.get(),
        static_cast<GstPadChainFunction>(webkitMediaStreamSrcChain));
}

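// Downstream event probe installed on the appsrc source pad: it replaces the default
// stream-start event with one carrying the track ID, pushes the track tags and only then
// exposes the ghost pad, so downstream sees a fully described stream.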
static GstPadProbeReturn webkitMediaStreamSrcPadProbeCb(GstPad* pad, GstPadProbeInfo* info, ProbeData* data)
{
    GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
    WebKitMediaStreamSrc* self = data->self;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_STREAM_START: {
        const gchar* stream_id;

        gst_event_parse_stream_start(event, &stream_id);
        if (!g_strcmp0(stream_id, data->track->id().utf8().data())) {
            GST_INFO_OBJECT(pad, "Stream-start event already carries the track ID");
            return GST_PAD_PROBE_OK;
        }

        auto stream_start = gst_event_new_stream_start(data->track->id().utf8().data());
        gst_event_set_group_id(stream_start, 1);
        gst_event_unref(event);

        gst_pad_push_event(pad, stream_start);
        gst_pad_push_event(pad, gst_event_new_tag(mediaStreamTrackPrivateGetTags(data->track.get())));

        webkitMediaStreamSrcAddPad(self, pad, data->pad_template);

        return GST_PAD_PROBE_HANDLED;
    }
    default:
        break;
    }

    return GST_PAD_PROBE_OK;
}

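// Adds the appsrc to the bin and exposes its source pad, either directly when the element
// wraps a single track or through the stream-start rewriting probe above, and optionally
// registers the track observer.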
static gboolean webkitMediaStreamSrcSetupSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement* element,
    GstStaticPadTemplate* pad_template, gboolean observe_track,
    bool onlyTrack)
{
    auto pad = adoptGRef(gst_element_get_static_pad(element, "src"));

    gst_bin_add(GST_BIN(self), element);

    if (!onlyTrack) {
        ProbeData* data = new ProbeData;
        data->self = WEBKIT_MEDIA_STREAM_SRC(self);
        data->pad_template = pad_template;
        data->track = track;

        gst_pad_add_probe(pad.get(), (GstPadProbeType)GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
            (GstPadProbeCallback)webkitMediaStreamSrcPadProbeCb, data,
            [](gpointer data) {
                delete (ProbeData*)data;
            });
    } else
        webkitMediaStreamSrcAddPad(self, pad.get(), pad_template);

    if (observe_track)
        track->addObserver(*self->mediaStreamTrackObserver.get());

    gst_element_sync_state_with_parent(element);
    return TRUE;
}

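// Creates the live, time-based appsrc feeding one track and hooks it into the bin.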
static gboolean webkitMediaStreamSrcSetupAppSrc(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate* track, GstElement** element,
    GstStaticPadTemplate* pad_template, bool onlyTrack)
{
    *element = gst_element_factory_make("appsrc", nullptr);
    g_object_set(*element, "is-live", true, "format", GST_FORMAT_TIME, nullptr);

    return webkitMediaStreamSrcSetupSrc(self, track, *element, pad_template, TRUE, onlyTrack);
}

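// Publishes a GstStreamCollection describing the current tracks so that playbin3 (and any
// other GST_MESSAGE_STREAM_COLLECTION listener) knows what this source exposes.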
static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    GST_OBJECT_LOCK(self);
    self->streamCollection = adoptGRef(gst_stream_collection_new(stream->id().utf8().data()));
    for (auto& track : stream->tracks()) {
        auto gststream = webkitMediaStreamNew(track.get());

        gst_stream_collection_add_stream(self->streamCollection.get(), gststream);
    }
    GST_OBJECT_UNLOCK(self);

    gst_element_post_message(GST_ELEMENT(self),
        gst_message_new_stream_collection(GST_OBJECT(self), self->streamCollection.get()));
}

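// Creates the appsrc matching the track type. When onlyTrack is true the element wraps a
// single track and keeps a reference to it.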
bool webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, bool onlyTrack)
{
    bool res = false;
    if (track->type() == RealtimeMediaSource::Type::Audio)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->audioSrc, &audioSrcTemplate, onlyTrack);
    else if (track->type() == RealtimeMediaSource::Type::Video)
        res = webkitMediaStreamSrcSetupAppSrc(self, track, &self->videoSrc, &videoSrcTemplate, onlyTrack);
    else
        GST_INFO("Unsupported track type: %d", static_cast<int>(track->type()));

    if (onlyTrack && res)
        self->track = track;

    return res;
}

static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType)
{
    if (trackType == RealtimeMediaSource::Type::Audio) {
        if (self->audioSrc) {
            gst_element_set_state(self->audioSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->audioSrc);
            self->audioSrc = nullptr;
        }
    } else if (trackType == RealtimeMediaSource::Type::Video) {
        if (self->videoSrc) {
            gst_element_set_state(self->videoSrc, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(self), self->videoSrc);
            self->videoSrc = nullptr;
        }
    } else
        GST_INFO("Unsupported track type: %d", static_cast<int>(trackType));
}

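// Attaches the element to a MediaStreamPrivate: existing sources are dropped, a stream
// collection is posted and one appsrc per track is created.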
bool webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream)
{
    ASSERT(WEBKIT_IS_MEDIA_STREAM_SRC(self));

    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Audio);
    webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Video);

    webkitMediaStreamSrcPostStreamCollection(self, stream);

    self->stream = stream;
    self->stream->addObserver(*self->mediaStreamObserver.get());
    for (auto& track : stream->tracks())
        webkitMediaStreamSrcAddTrack(self, track.get(), false);

    return true;
}

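// The two push helpers below shift timestamps so the stream starts at 0: the PTS of the
// first buffer is recorded and applied as a negative offset on the appsrc source pad.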
static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->videoSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstFramePts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstFramePts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->videoSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstFramePts);
        }

        gst_app_src_push_sample(GST_APP_SRC(self->videoSrc), gstsample);
    }
}

static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample)
{
    if (self->audioSrc) {
        if (!GST_CLOCK_TIME_IS_VALID(self->firstAudioBufferPts)) {
            auto buffer = gst_sample_get_buffer(gstsample);

            self->firstAudioBufferPts = GST_BUFFER_PTS(buffer);
            auto pad = adoptGRef(gst_element_get_static_pad(self->audioSrc, "src"));
            gst_pad_set_offset(pad.get(), -self->firstAudioBufferPts);
        }
        gst_app_src_push_sample(GST_APP_SRC(self->audioSrc), gstsample);
    }
}

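// Looks up the source pad whose sticky stream-start ID matches the ended track, re-posts
// the stream collection (so e.g. video.videoWidth is reset) and sends updated tags plus
// EOS on that pad.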
static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self,
    MediaStreamTrackPrivate& track)
{
    GRefPtr<GstPad> pad = nullptr;

    GST_OBJECT_LOCK(self);
    for (auto tmp = GST_ELEMENT(self)->srcpads; tmp; tmp = tmp->next) {
        GstPad* tmppad = GST_PAD(tmp->data);
        const gchar* stream_id;

        GstEvent* stream_start = gst_pad_get_sticky_event(tmppad, GST_EVENT_STREAM_START, 0);
        if (!stream_start)
            continue;

        gst_event_parse_stream_start(stream_start, &stream_id);
        if (String(stream_id) == track.id()) {
            pad = tmppad;
            break;
        }
    }
    GST_OBJECT_UNLOCK(self);

    if (!pad) {
        GST_ERROR_OBJECT(self, "No pad found for %s", track.id().utf8().data());

        return;
    }

    // Make sure that the video.videoWidth is reset to 0
    webkitMediaStreamSrcPostStreamCollection(self, self->stream.get());
    auto tags = mediaStreamTrackPrivateGetTags(&track);
    gst_pad_push_event(pad.get(), gst_event_new_tag(tags));
    gst_pad_push_event(pad.get(), gst_event_new_eos());
}

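// Creates a new WebKitMediaStreamSrc element. As an illustrative (hypothetical) usage
// sketch, a caller would typically pair it with webkitMediaStreamSrcSetStream():
//     auto* src = webkitMediaStreamSrcNew();
//     webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(src), streamPrivate);
// where streamPrivate is a MediaStreamPrivate*.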
GstElement* webkitMediaStreamSrcNew(void)
{
    return GST_ELEMENT(g_object_new(webkit_media_stream_src_get_type(), nullptr));
}

} // namespace WebCore

#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)