/*
 * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Collabora Ltd. All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
30 #include "GStreamerUtilities.h"
31 #include "GraphicsContext.h"
32 #include "GraphicsTypes.h"
33 #include "ImageGStreamer.h"
34 #include "ImageOrientation.h"
36 #include "MediaPlayer.h"
37 #include "NotImplemented.h"
38 #include "VideoSinkGStreamer.h"
39 #include "WebKitWebSourceGStreamer.h"
40 #include <wtf/glib/GMutexLocker.h>
41 #include <wtf/glib/GUniquePtr.h>
42 #include <wtf/text/AtomicString.h>
43 #include <wtf/text/CString.h>
44 #include <wtf/MathExtras.h>
46 #include <gst/audio/streamvolume.h>
47 #include <gst/video/gstvideometa.h>
#if USE(GSTREAMER_GL)
#include <gst/app/gstappsink.h>
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#endif

// gstglapi.h may include eglplatform.h and it includes X.h, which
// defines None, breaking the MediaPlayer::None enum value.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)
#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperGL.h"
#endif

#if USE(COORDINATED_GRAPHICS_THREADED)
#include "TextureMapperPlatformLayerBuffer.h"
#endif

#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
#include "SharedBuffer.h"
#include "WebKitClearKeyDecryptorGStreamer.h"
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
#include <runtime/JSCInlines.h>
#include <runtime/TypedArrayInlines.h>
#include <runtime/Uint8Array.h>
#endif
#endif
GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

namespace WebCore {

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
static AtomicString keySystemIdToUuid(const AtomicString&);
#endif

void registerWebKitGStreamerElements()
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    if (!webkitGstCheckVersion(1, 6, 1))
        return;

    GRefPtr<GstElementFactory> clearKeyDecryptorFactory = gst_element_factory_find("webkitclearkey");
    if (!clearKeyDecryptorFactory)
        gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
#endif
}
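// Euclid's algorithm. naturalSize() uses this to reduce the display aspect
// ratio to its lowest terms before scaling, to avoid overflows.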
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a % b;
        a = b;
        b = temp;
    }

    return std::abs(a);
}
#if USE(TEXTURE_MAPPER_GL)
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        break;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
#endif
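// Keeps the GL-mapped GstVideoFrame (and therefore its texture) alive for as
// long as the compositor holds the TextureMapperPlatformLayerBuffer that
// references it; the frame is unmapped when the holder is destroyed.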
#if USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
            return;

        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        m_isValid = true;
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isValid))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool isValid() const { return m_isValid; }

private:
    GstVideoFrame m_videoFrame;
    IntSize m_size;
    TextureMapperGL::Flags m_flags;
    GLuint m_textureID { 0 };
    bool m_isValid { false };
};
#endif // USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_player(player)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#endif
    , m_usingFallbackVideoSink(false)
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    , m_cdmSession(nullptr)
#endif
{
    g_mutex_init(&m_sampleMutex);
#if USE(COORDINATED_GRAPHICS_THREADED)
    m_platformLayerProxy = adoptRef(new TextureMapperPlatformLayerProxy());
#endif
}
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    m_protectionCondition.notifyOne();
#endif

    m_notifier.cancelPendingNotifications();

#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    m_drawTimer.stop();
    {
        LockHolder locker(m_drawMutex);
        m_drawCondition.notifyOne();
    }
#endif

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    g_mutex_clear(&m_sampleMutex);

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    m_cdmSession = nullptr;
#endif

    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;
}
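// Extracts, from a "drm-preferred-decryption-system-id" need-context message,
// the pending stream protection events and the list of key systems the
// stream declares support for.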
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
{
    const GstStructure* structure = gst_message_get_structure(message);

    const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
    ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
    const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
    ASSERT(streamEncryptionAllowedSystems);
    Vector<String> streamEncryptionAllowedSystemsVector;
    unsigned i;
    for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
        streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);

    const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
    ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
    unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
    Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
    for (i = 0; i < streamEncryptionEventsListSize; ++i)
        streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));

    return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
}
#endif
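// Handles messages that must be answered synchronously on the posting thread:
// GL context requests from GStreamer GL elements and, with legacy EME, the
// decryptor's query for the preferred decryption system.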
bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    UNUSED_PARAM(message);
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }

        GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
        std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
        GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
        Vector<uint8_t> concatenatedInitDataChunks;
        unsigned concatenatedInitDataChunksNumber = 0;
        String eventKeySystemIdString;
        for (auto& event : streamEncryptionInformation.first) {
            GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            // Here we receive the DRM init data from the pipeline: we will emit
            // the needkey event with that data and the browser might create a
            // CDMSession from this event handler. If such a session was created,
            // we will emit the message event from the session to provide the
            // DRM challenge to the browser and wait for an update. If, on the
            // contrary, no session was created, we won't wait and will let the
            // pipeline error out by itself.
            GstMapInfo mapInfo;
            if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
                GST_WARNING("cannot map %s protection data", eventKeySystemId);
                break;
            }

            GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
            GST_MEMDUMP("init data", reinterpret_cast<const unsigned char*>(mapInfo.data), mapInfo.size);
            concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
            ++concatenatedInitDataChunksNumber;
            eventKeySystemIdString = eventKeySystemId;
            if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
                GST_TRACE("considering init data handled for %s", eventKeySystemId);
                m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
            }
            gst_buffer_unmap(data, &mapInfo);
        }

        if (!concatenatedInitDataChunksNumber)
            return false;

        if (concatenatedInitDataChunksNumber > 1)
            eventKeySystemIdString = emptyString();

        RunLoop::main().dispatch([this, eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
            GST_DEBUG("scheduling keyNeeded event for %s with concatenated init data size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
            GST_MEMDUMP("init data", initData.data(), initData.size());

            // FIXME: Provide a valid sessionId somehow.
            RefPtr<Uint8Array> initDataArray = Uint8Array::create(initData.data(), initData.size());
            needKey(initDataArray);
        });

        GST_INFO("waiting for a key request to arrive");
        LockHolder lock(m_protectionMutex);
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return !this->m_lastGenerateKeyRequestKeySystemUuid.isEmpty();
        });
        if (!m_lastGenerateKeyRequestKeySystemUuid.isEmpty()) {
            GST_INFO("got a key request, continuing with %s on %s", m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("did not get a proper key request");

        return true;
    }
#endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)

    return false;
}
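// Answers a GL need-context query with either the wrapped GstGLDisplay or the
// wrapped application GL context, depending on the requested context type.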
#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
{
    if (!player->ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, player->gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
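// Lazily wraps the shared compositing display and WebKit's sharing GL context
// into GstGLDisplay/GstGLContext objects so the GStreamer GL elements can
// reuse them.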
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES_2)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)
// Returns the size of the video.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size; if this fails, the
    // video sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG("Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;

    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    // get_volume() can return values greater than 1.0 if the user applies
    // software gain via a third-party application (GNOME volume control,
    // for instance).
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG("Volume changed to: %f", player->volume());

    player->m_notifier.notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
{
    if (!m_volumeElement)
        return;

    g_object_set(m_volumeElement.get(), "mute", muted, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier.notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
}
#if USE(TEXTURE_MAPPER_GL)
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }

    // Right now the TextureMapper only supports chromas with one plane.
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
#endif
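// Converts the current sample into a TextureMapperPlatformLayerBuffer and
// pushes it to the platform layer proxy. Without GSTREAMER_GL the
// ConditionNotifier wakes up the thread waiting in triggerRepaint() once the
// buffer has been pushed.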
#if USE(COORDINATED_GRAPHICS_THREADED)
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive()) {
        // Consume the buffer (so it gets eventually unreffed) but keep the rest of the info.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        GstStructure* infoCopy = nullptr;
        if (info)
            infoCopy = gst_structure_copy(info);
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), infoCopy));
        return;
    }

#if USE(GSTREAMER_GL)
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags());
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    GstVideoInfo videoInfo;
    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
        return;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
    if (UNLIKELY(!buffer)) {
        if (UNLIKELY(!m_context3D))
            m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);

        auto texture = BitmapTextureGL::create(*m_context3D);
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif
}
#endif
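// Runs on the main thread (via m_drawTimer or the notifier) and performs the
// actual paint request, waking up any thread blocked in triggerRepaint().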
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(isMainThread());

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (m_renderingCanBeAccelerated && client()) {
        client()->setPlatformLayerNeedsDisplay();
#if USE(GSTREAMER_GL)
        LockHolder lock(m_drawMutex);
        m_drawCondition.notifyOne();
#endif
        return;
    }
#endif

    m_player->repaint();

#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
#endif
}
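// Called by the video sink for every new sample, usually from a GStreamer
// streaming thread: stores the sample and then synchronizes with the main or
// compositor thread so it gets painted.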
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
        m_notifier.notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
    }

#if USE(COORDINATED_GRAPHICS_THREADED)
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        LockHolder lock(m_drawMutex);
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        m_drawCondition.wait(m_drawMutex);
    }
#endif
#else
#if USE(GSTREAMER_GL)
    {
        ASSERT(!isMainThread());

        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0);
        m_drawCondition.wait(m_drawMutex);
    }
#else
    repaint();
#endif
#endif
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
#endif
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    if (Image* image = reinterpret_cast<Image*>(gstImage->image().get()))
        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper& textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    if (!m_player->visible())
        return;

    if (m_usingFallbackVideoSink) {
        RefPtr<BitmapTexture> texture;
        IntSize size;
        TextureMapperGL::Flags flags;
        {
            WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

            GstVideoInfo videoInfo;
            if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
                return;

            size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
            flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
            texture = textureMapper.acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
            updateTexture(static_cast<BitmapTextureGL&>(*texture), videoInfo);
        }

        TextureMapperGL& texmapGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
        BitmapTextureGL* textureGL = static_cast<BitmapTextureGL*>(texture.get());
        texmapGL.drawTexture(textureGL->id(), flags, textureGL->size(), targetRect, matrix, opacity);
        return;
    }

#if USE(GSTREAMER_GL)
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return;

    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    TextureMapperGL::Flags flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    TextureMapperGL& textureMapperGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
    textureMapperGL.drawTexture(textureID, flags, size, targetRect, matrix, opacity);
    gst_video_frame_unmap(&videoFrame);
#endif
}
#endif
#if USE(GSTREAMER_GL)
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
// This should be called with the sample mutex locked.
GLContext* MediaPlayerPrivateGStreamerBase::prepareContextForCairoPaint(GstVideoInfo& videoInfo, IntSize& size, IntSize& rotatedSize)
{
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return nullptr;

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    // Thread-awareness is a huge performance hit on non-Intel drivers.
    cairo_gl_device_set_thread_aware(context->cairoDevice(), FALSE);

    size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    rotatedSize = m_videoSourceOrientation.usesWidthAsHeight() ? size.transposedSize() : size;

    return context;
}
// This should be called with the sample mutex locked.
bool MediaPlayerPrivateGStreamerBase::paintToCairoSurface(cairo_surface_t* outputSurface, cairo_device_t* device, GstVideoInfo& videoInfo, const IntSize& size, const IntSize& rotatedSize, bool flipY)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    RefPtr<cairo_surface_t> surface = adoptRef(cairo_gl_surface_create_for_texture(device, CAIRO_CONTENT_COLOR_ALPHA, textureID, size.width(), size.height()));
    RefPtr<cairo_t> cr = adoptRef(cairo_create(outputSurface));

    switch (m_videoSourceOrientation) {
    case DefaultImageOrientation:
        break;
    case OriginRightTop:
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), piOverTwoDouble);
        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
        break;
    case OriginBottomRight:
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), piDouble);
        cairo_translate(cr.get(), -rotatedSize.width() * 0.5, -rotatedSize.height() * 0.5);
        break;
    case OriginLeftBottom:
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), 3 * piOverTwoDouble);
        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    if (flipY) {
        cairo_scale(cr.get(), 1.0f, -1.0f);
        cairo_translate(cr.get(), 0.0f, -size.height());
    }

    cairo_set_source_surface(cr.get(), surface.get(), 0, 0);
    cairo_set_operator(cr.get(), CAIRO_OPERATOR_SOURCE);
    cairo_paint(cr.get());

    gst_video_frame_unmap(&videoFrame);

    return true;
}
#endif // USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#endif // USE(GSTREAMER_GL)
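// Copies the current GL video texture into a caller-provided platform texture
// (used, for instance, for WebGL video uploads), honouring the source
// orientation. Only available with the GStreamer GL sink.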
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
#if USE(GSTREAMER_GL)
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return false;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>();

    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);

    gst_video_frame_unmap(&videoFrame);

    return copied;
#else
    return false;
#endif
}
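// Renders the current sample into a new cairo-GL surface and returns it as
// the native image for the current frame.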
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    GstVideoInfo videoInfo;
    IntSize size, rotatedSize;
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    GLContext* context = prepareContextForCairoPaint(videoInfo, size, rotatedSize);
    if (!context)
        return nullptr;

    RefPtr<cairo_surface_t> rotatedSurface = adoptRef(cairo_gl_surface_create(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, rotatedSize.width(), rotatedSize.height()));
    if (!paintToCairoSurface(rotatedSurface.get(), context->cairoDevice(), videoInfo, size, rotatedSize, false))
        return nullptr;

    return rotatedSurface;
#else
    return nullptr;
#endif
}
void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
}

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    return NoPlatformMedia;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}
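// GL rendering path: an appsink that keeps at most one GL-memory buffer and
// signals new-sample/new-preroll into triggerRepaint(), wired behind
// glupload ! glcolorconvert inside a bin.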
#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    // FIXME: Currently it's not possible to get the video frames and caps using this approach until
    // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
    // other means (canvas or WebGL) before playing state can result in a crash.
    // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) { RGBA }"));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }

    return videoSink;
}
#endif
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
    }

    GstElement* videoSink = nullptr;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);
#endif // LOG_DISABLED

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG("Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG("Not setting stream volume, trusting system one");

    GST_DEBUG("Setting stream muted %d", m_player->muted());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::needKey(RefPtr<Uint8Array> initData)
{
    if (!m_player->keyNeeded(initData.get()))
        GST_INFO("no event handler for key needed");
}

void MediaPlayerPrivateGStreamerBase::setCDMSession(CDMSession* session)
{
    GST_DEBUG("setting CDM session to %p", session);
    m_cdmSession = session;
}

void MediaPlayerPrivateGStreamerBase::keyAdded()
{
}

void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
        m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
        return;
    }

    const gchar* eventKeySystemId = nullptr;
    GstBuffer* data = nullptr;
    gst_event_parse_protection(event, &eventKeySystemId, &data, nullptr);

    GstMapInfo mapInfo;
    if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
        GST_WARNING("cannot map %s protection data", eventKeySystemId);
        return;
    }

    GST_DEBUG("scheduling keyNeeded event for %s with init data size of %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
    GST_MEMDUMP("init data", mapInfo.data, mapInfo.size);
    RefPtr<Uint8Array> initDataArray = Uint8Array::create(mapInfo.data, mapInfo.size);
    needKey(initDataArray);
    gst_buffer_unmap(data, &mapInfo);
}

void MediaPlayerPrivateGStreamerBase::receivedGenerateKeyRequest(const String& keySystem)
{
    GST_DEBUG("received generate key request for %s", keySystem.utf8().data());
    m_lastGenerateKeyRequestKeySystemUuid = keySystemIdToUuid(keySystem);
    m_protectionCondition.notifyOne();
}

static AtomicString keySystemIdToUuid(const AtomicString& id)
{
    if (equalIgnoringASCIICase(id, CLEAR_KEY_PROTECTION_SYSTEM_ID))
        return AtomicString(CLEAR_KEY_PROTECTION_SYSTEM_UUID);

    return AtomicString();
}

std::unique_ptr<CDMSession> MediaPlayerPrivateGStreamerBase::createSession(const String& keySystem, CDMSessionClient*)
{
    GST_INFO("Requested CDMSession for KeySystem %s: Returning null.", keySystem.utf8().data());
    return nullptr;
}

void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
{
    gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
        gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
}
#endif
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    GST_INFO("Checking for KeySystem support with %s and type %s: false.", keySystem.utf8().data(), mimeType.utf8().data());
    return false;
}

MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

}

#endif // USE(GSTREAMER)