2 * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #import "SourceBufferPrivateAVFObjC.h"
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
34 #import "CDMSessionAVContentKeySession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "MediaDescription.h"
39 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
40 #import "MediaSample.h"
41 #import "MediaSampleAVFObjC.h"
42 #import "MediaSourcePrivateAVFObjC.h"
43 #import "NotImplemented.h"
44 #import "SharedBuffer.h"
45 #import "SourceBufferPrivateClient.h"
46 #import "TimeRanges.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "WebCoreDecompressionSession.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <JavaScriptCore/TypedArrayInlines.h>
51 #import <QuartzCore/CALayer.h>
52 #import <objc/runtime.h>
53 #import <pal/avfoundation/MediaTimeAVFoundation.h>
54 #import <pal/spi/mac/AVFoundationSPI.h>
55 #import <wtf/BlockObjCExceptions.h>
56 #import <wtf/HashCountedSet.h>
57 #import <wtf/MainThread.h>
58 #import <wtf/Semaphore.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WeakPtr.h>
61 #import <wtf/text/AtomicString.h>
62 #import <wtf/text/CString.h>
64 #pragma mark - Soft Linking
66 #import <pal/cf/CoreMediaSoftLink.h>
// Runtime soft-linking of AVFoundation: classes and string constants are
// resolved lazily through generated getters instead of a hard link-time
// dependency, so WebCore can load even when the framework is unavailable.
68 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
70 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
71 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
72 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
73 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
74 ALLOW_NEW_API_WITHOUT_GUARDS_END
75 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
76 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
// Alias the framework constant names onto their soft-linked getters so the
// rest of this file can use the normal AVFoundation spellings directly.
84 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
87 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
88 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
89 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
92 #pragma mark AVStreamSession
// Local SPI declaration of AVStreamSession; only the two parser-attachment
// methods this file uses are declared.
// NOTE(review): this listing appears truncated (original line numbers jump);
// the @end for this interface is not visible here.
94 @interface AVStreamSession : NSObject
95 - (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
96 - (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
100 #pragma mark WebAVStreamDataParserListener
// Delegate for AVStreamDataParser callbacks. Holds a WeakPtr back to the
// owning SourceBufferPrivateAVFObjC (callbacks arrive off the main thread)
// and an abort semaphore used to unblock the parser queue when an append
// is aborted.
102 @interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
103 WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
104 Box<Semaphore> _abortSemaphore;
105 AVStreamDataParser* _parser;
107 @property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
108 @property (assign) Box<Semaphore> abortSemaphore;
109 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
// Forwards AVStreamDataParser delegate callbacks (delivered on the parser's
// dispatch queue) to the owning SourceBufferPrivateAVFObjC on the main thread,
// retaining the Obj-C/CF payloads across the hop.
// NOTE(review): this listing is truncated — many statements, braces and the
// @end are missing between the surviving lines below.
112 @implementation WebAVStreamDataParserListener
113 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
// Registers self as the parser's delegate during init.
122 [_parser setDelegate:self];
126 @synthesize parent=_parent;
127 @synthesize abortSemaphore=_abortSemaphore;
// Delegate is cleared here (presumably dealloc and an invalidate method —
// the enclosing method signatures are not visible in this listing).
131 [_parser setDelegate:nil];
137 [_parser setDelegate:nil];
// Asset-level parse result: hop to the main thread with the asset retained.
141 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
143 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
145 RetainPtr<AVAsset*> protectedAsset = asset;
146 callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
148 parent->didParseStreamDataAsAsset(protectedAsset.get());
// Same as above; the discontinuity flag is deliberately ignored.
152 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
154 UNUSED_PARAM(discontinuity);
155 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
157 RetainPtr<AVAsset*> protectedAsset = asset;
158 callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
160 parent->didParseStreamDataAsAsset(protectedAsset.get());
// Parse failure: forward the NSError to the main thread.
164 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
166 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
168 RetainPtr<NSError> protectedError = error;
169 callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
171 parent->didFailToParseStreamDataWithError(protectedError.get());
// Media sample output: retain the CMSampleBuffer and copy the media type
// into a WTF::String before crossing threads.
175 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
177 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
179 RetainPtr<CMSampleBufferRef> protectedSample = sample;
180 callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
182 parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
186 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
188 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
190 callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
192 parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
// This callback must block until the main thread has attached the
// AVStreamSession; it polls with 100ms waits so an abort (signalled via
// _abortSemaphore) can break the wait without deadlocking the parser queue.
196 - (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
198 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
200 // We must call synchronously to the main thread, as the AVStreamSession must be associated
201 // with the streamDataParser before the delegate method returns.
202 Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
203 callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
205 parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
206 respondedSemaphore->signal();
210 if (respondedSemaphore->waitFor(100_ms))
// Re-signal the abort semaphore so other waiters also wake up.
213 if (_abortSemaphore->waitFor(100_ms)) {
214 _abortSemaphore->signal();
// Same synchronous-wait pattern, but waiting for a CDM session to be
// available (hasSessionSemaphore is signalled by the main thread).
220 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
222 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
224 Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
225 callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
227 parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
231 if (hasSessionSemaphore->waitFor(100_ms))
234 if (_abortSemaphore->waitFor(100_ms)) {
235 _abortSemaphore->signal();
// KVO/notification listener that watches AVSampleBufferDisplayLayers and
// AVSampleBufferAudioRenderers for errors on behalf of one
// SourceBufferPrivateAVFObjC (_parent is a raw pointer — presumably
// invalidated elsewhere; the invalidation path is not visible here).
242 @interface WebAVSampleBufferErrorListener : NSObject {
243 WebCore::SourceBufferPrivateAVFObjC* _parent;
244 Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
245 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
246 Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
247 ALLOW_NEW_API_WITHOUT_GUARDS_END
250 - (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
// begin/stop pairs must be balanced: they add/remove KVO observers.
252 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
253 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
254 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
255 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
256 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
257 ALLOW_NEW_API_WITHOUT_GUARDS_END
// Observes "error" (and, for layers, the HDCP-related
// "outputObscuredDueToInsufficientExternalProtection" key) via KVO, plus the
// failed-to-decode notification, and forwards each event to _parent on the
// main thread.
// NOTE(review): listing is truncated — method bodies are missing lines and
// the @end is not visible.
260 @implementation WebAVSampleBufferErrorListener
262 - (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
264 if (!(self = [super init]))
// Teardown path (method header not visible): removes every KVO observation
// that beginObserving* added, then clears notification registrations.
279 if (!_parent && !_layers.size() && !_renderers.size())
282 for (auto& layer : _layers) {
283 [layer removeObserver:self forKeyPath:@"error"];
284 [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
288 for (auto& renderer : _renderers)
289 [renderer removeObserver:self forKeyPath:@"error"];
292 [[NSNotificationCenter defaultCenter] removeObserver:self];
297 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
300 ASSERT(!_layers.contains(layer));
302 _layers.append(layer);
303 [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
304 [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
305 [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
// Mirror of beginObservingLayer: every add above has a matching remove here.
308 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
311 ASSERT(_layers.contains(layer));
313 [layer removeObserver:self forKeyPath:@"error"];
314 [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
315 _layers.remove(_layers.find(layer));
317 [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
320 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
321 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
323 ALLOW_NEW_API_WITHOUT_GUARDS_END
325 ASSERT(!_renderers.contains(renderer));
327 _renderers.append(renderer);
328 [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
331 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
332 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
333 ALLOW_NEW_API_WITHOUT_GUARDS_END
336 ASSERT(_renderers.contains(renderer));
338 [renderer removeObserver:self forKeyPath:@"error"];
339 _renderers.remove(_renderers.find(renderer));
// KVO entry point: dispatch by observed object class, then by key path,
// always bouncing to the main thread with self retained (protectedSelf).
342 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
344 UNUSED_PARAM(context);
345 UNUSED_PARAM(keyPath);
348 RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
349 if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
350 RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
351 ASSERT(_layers.contains(layer.get()));
353 if ([keyPath isEqualTo:@"error"]) {
354 RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
355 callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
356 protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
358 } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
359 callOnMainThread([protectedSelf = WTFMove(protectedSelf), obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
360 protectedSelf->_parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
363 ASSERT_NOT_REACHED();
365 } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
366 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
367 RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
368 ALLOW_NEW_API_WITHOUT_GUARDS_END
369 RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
// Renderers are only observed for "error".
371 ASSERT(_renderers.contains(renderer.get()));
372 ASSERT([keyPath isEqualTo:@"error"]);
374 callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
375 protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
378 ASSERT_NOT_REACHED();
// Decode-failure notification handler; re-checks _parent and layer
// membership on the main thread because the listener may have been torn
// down between posting and delivery.
381 - (void)layerFailedToDecode:(NSNotification*)note
383 RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
384 RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];
386 RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
387 callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
388 if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
390 protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
// Small Obj-C wrapper carrying a WeakPtr to the source buffer; stored in the
// attachments of sample buffers so the buffer-consumed CM notification can
// find its way back to the right SourceBufferPrivateAVFObjC (see
// bufferWasConsumedCallback below).
397 @interface WebBufferConsumedContext : NSObject {
398 WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
400 @property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
403 @implementation WebBufferConsumedContext
404 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
// Accessor returns nullptr after the source buffer has been destroyed,
// courtesy of the WeakPtr.
413 - (WebCore::SourceBufferPrivateAVFObjC*)parent
415 return _parent.get();
423 #pragma mark MediaDescriptionAVFObjC
// Immutable MediaDescription backed by an AVAssetTrack snapshot: kind flags
// are taken from the track's media characteristics and the codec string from
// the first format description's media subtype (a FourCC reinterpreted as
// four Latin-1 characters).
425 class MediaDescriptionAVFObjC final : public MediaDescription {
427 static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
428 virtual ~MediaDescriptionAVFObjC() { }
430 AtomicString codec() const override { return m_codec; }
431 bool isVideo() const override { return m_isVideo; }
432 bool isAudio() const override { return m_isAudio; }
433 bool isText() const override { return m_isText; }
436 MediaDescriptionAVFObjC(AVAssetTrack* track)
437 : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
438 , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
439 , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
441 NSArray* formatDescriptions = [track formatDescriptions];
// description may be null when the track has no format descriptions;
// NOTE(review): the null-check (if any) is not visible in this truncated
// listing — CMFormatDescriptionGetMediaSubType on null would be unsafe.
442 CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
444 FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
445 m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
449 AtomicString m_codec;
456 #pragma mark SourceBufferPrivateAVFObjC
// Key under which a WebBufferConsumedContext is stored in the notification
// payload dictionary.
458 static NSString *kBufferConsumedContext = @"BufferConsumedContext";
// CMNotificationCenter callback for kCMSampleBufferConsumerNotification_
// BufferConsumed. May fire on any thread; re-dispatches itself to the main
// thread (retaining the payload) before touching the source buffer.
460 static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
462 if (!isMainThread()) {
463 callOnMainThread([notificationName, payload = retainPtr(payload)] {
464 bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
469 if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
472 ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
473 WebBufferConsumedContext *context = [(__bridge NSDictionary *)payload valueForKey:kBufferConsumedContext];
// context.parent is nullptr if the source buffer died; messaging guards that.
477 if (auto sourceBuffer = context.parent)
478 sourceBuffer->bufferWasConsumed();
// Factory: the constructor is private, so creation goes through adoptRef.
481 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
483 return adoptRef(new SourceBufferPrivateAVFObjC(parent));
// Constructor: builds the stream parser, its main-thread-forwarding delegate,
// the KVO error listener and the dispatch group used to join in-flight
// appends, then registers for buffer-consumed notifications.
486 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
487 : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
488 , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
489 , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
490 , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
491 , m_mediaSource(parent)
493 CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
494 m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
// Destructor: must mirror the AddListener above and release any parser-queue
// thread still blocked on the session semaphore.
497 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
503 CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
505 if (m_hasSessionSemaphore)
506 m_hasSessionSemaphore->signal();
// Main-thread continuation of the parser's asset callback: validates hardware
// decode requirements, rebuilds the track lists, and hands the resulting
// initialization segment to the SourceBufferPrivateClient.
// NOTE(review): listing is truncated; assignments to m_asset and several
// guards are among the missing lines.
509 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
511 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
// Reject the append when any track needs hardware decode that isn't allowed.
516 if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
517 for (AVAssetTrack *track in [asset tracks]) {
518 if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
519 m_parsingSucceeded = false;
// A new initialization segment replaces the previous track set.
527 m_videoTracks.clear();
528 m_audioTracks.clear();
530 m_discardSamplesUntilNextInitializationSegment = false;
532 SourceBufferPrivateClient::InitializationSegment segment;
// Prefer overallDurationHint (SPI) and fall back to the asset duration when
// the hint is invalid or zero.
534 if ([m_asset respondsToSelector:@selector(overallDurationHint)])
535 segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);
537 if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
538 segment.duration = PAL::toMediaTime([m_asset duration])
540 for (AVAssetTrack* track in [m_asset tracks]) {
541 if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
542 // FIXME(125161): Handle in-band text tracks.
546 if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
547 SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
548 RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
549 info.track = videoTrack;
550 m_videoTracks.append(videoTrack);
551 info.description = MediaDescriptionAVFObjC::create(track);
552 segment.videoTracks.append(info);
553 } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
554 SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
555 RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
556 info.track = audioTrack;
557 m_audioTracks.append(audioTrack);
558 info.description = MediaDescriptionAVFObjC::create(track);
559 segment.audioTracks.append(info);
562 // FIXME(125161): Add TextTrack support
566 m_mediaSource->player()->characteristicsChanged();
569 m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
// Parse-error continuation: records failure so appendCompleted() reports
// ParsingFailed to the client.
572 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
577 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
579 m_parsingSucceeded = false;
// NOTE(review): holding a const String& member in this struct is only safe
// if the referenced string outlives every use — verify at the usage site
// (not visible in this truncated listing).
582 struct ProcessCodedFrameInfo {
583 SourceBufferPrivateAVFObjC* sourceBuffer;
585 const String& mediaType;
// Sample continuation: flags parameter is currently unused.
588 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
590 processCodedFrame(trackID, sampleBuffer, mediaType);
// Wraps the CMSampleBuffer as a MediaSample and forwards it to the client,
// dropping samples for tracks we do not handle (e.g. text) and samples that
// arrive after a parser reset but before the next initialization segment.
593 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
595 if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
596 // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
597 // will just confuse its state. Drop this sample until we can handle text tracks properly.
601 if (m_discardSamplesUntilNextInitializationSegment)
605 Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
606 LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
607 m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
// End-of-track notification is currently a no-op (body not visible here).
613 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
// Legacy-EME path: remembers the protected track and attaches the parser to
// the active CDM session, or to the player's AVStreamSession when no
// AVContentKeySession-based CDM session is available.
618 void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
625 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
626 LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
627 m_protectedTrackID = trackID;
629 if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
630 session->addParser(m_parser.get());
631 else if (!CDMSessionAVContentKeySession::isAvailable()) {
// addStreamDataParser: can throw Obj-C exceptions; keep them contained.
632 BEGIN_BLOCK_OBJC_EXCEPTIONS;
633 [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
634 END_BLOCK_OBJC_EXCEPTIONS;
637 UNUSED_PARAM(trackID);
// Main-thread continuation of the parser's init-data callback. Under legacy
// EME it raises a key-needed event and either attaches the parser to an
// existing CDM session (signalling the waiting parser queue immediately) or
// parks hasSessionSemaphore until setCDMSession()/setCDMInstance() signals
// it. Under modern EME it forwards "sinf" initialization data to the player.
641 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
646 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
647 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
648 m_protectedTrackID = trackID;
649 RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
650 [initData getBytes:initDataArray->data() length:initDataArray->length()];
651 m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
652 if (auto session = m_mediaSource->player()->cdmSession()) {
653 session->addParser(m_parser.get());
654 hasSessionSemaphore->signal();
// Replacing a still-pending semaphore: wake the old waiter first so the
// parser queue cannot deadlock.
656 if (m_hasSessionSemaphore)
657 m_hasSessionSemaphore->signal();
658 m_hasSessionSemaphore = hasSessionSemaphore;
662 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
664 auto initDataBuffer = SharedBuffer::create(initData);
665 m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
669 UNUSED_PARAM(initData);
670 UNUSED_PARAM(trackID);
671 UNUSED_PARAM(hasSessionSemaphore);
// Stores the client that receives init segments, samples, and append results.
674 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
// Lazily-created process-wide concurrent queue shared by all source buffers
// for AVStreamDataParser appends.
679 static dispatch_queue_t globalDataParserQueue()
681 static dispatch_queue_t globalQueue;
682 static dispatch_once_t onceToken;
683 dispatch_once(&onceToken, ^{
684 globalQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
// Asynchronously feeds appended bytes to the AVStreamDataParser on the global
// parser queue. The dispatch group brackets each in-flight append so abort()
// can join them; a parser reset makes the next append carry the
// discontinuity flag.
689 void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
691 LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data.data(), data.size());
693 // FIXME: Avoid the data copy by wrapping around the Vector<> object.
694 RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
695 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
// Keep the parser and delegate alive for the duration of the async append.
696 RetainPtr<AVStreamDataParser> parser = m_parser;
697 RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
699 m_parsingSucceeded = true;
700 dispatch_group_enter(m_isAppendingGroup.get());
702 dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
703 if (parserStateWasReset)
704 [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
706 [parser appendStreamData:nsData.get()];
// Completion is reported back on the main thread; weakThis guards against
// the source buffer having been destroyed meanwhile.
708 callOnMainThread([weakThis] {
710 weakThis->appendCompleted();
712 dispatch_group_leave(isAppendingGroup.get());
714 m_parserStateWasReset = false;
// Reports append success/failure to the client and marks loading progress.
717 void SourceBufferPrivateAVFObjC::appendCompleted()
719 if (m_parsingSucceeded && m_mediaSource)
720 m_mediaSource->player()->setLoadingProgresssed(true);
723 m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
// Cancels outstanding appends: wakes any parser-queue thread blocked on a
// session or abort semaphore, waits for all in-flight appends to drain, then
// re-arms the delegate with fresh weak pointers and a fresh abort semaphore.
726 void SourceBufferPrivateAVFObjC::abort()
728 // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
729 // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
730 // semaphore, the m_isAppendingGroup wait operation will deadlock.
731 if (m_hasSessionSemaphore)
732 m_hasSessionSemaphore->signal();
733 m_delegate.get().abortSemaphore->signal();
734 dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
// Revoking invalidates weakThis in any still-pending main-thread lambdas so
// their appendCompleted() calls become no-ops.
735 m_appendWeakFactory.revokeAll();
736 m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
737 m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
// Marks parser state dirty: the next append sends the discontinuity flag and
// samples are dropped until a new initialization segment arrives.
740 void SourceBufferPrivateAVFObjC::resetParserState()
742 m_parserStateWasReset = true;
743 m_discardSamplesUntilNextInitializationSegment = true;
// Detaches the parser from any CDM machinery (legacy stream session or
// modern content key session) and invalidates the delegate so no further
// callbacks reach this object.
746 void SourceBufferPrivateAVFObjC::destroyParser()
748 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
749 if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
750 [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
752 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
754 [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
757 [m_delegate invalidate];
758 m_delegate = nullptr;
// Tears down the display layer, decompression session and all audio
// renderers, unregistering each renderer's error observation.
762 void SourceBufferPrivateAVFObjC::destroyRenderers()
765 setVideoLayer(nullptr);
767 if (m_decompressionSession)
768 setDecompressionSession(nullptr);
770 for (auto& renderer : m_audioRenderers.values()) {
772 m_mediaSource->player()->removeAudioRenderer(renderer.get());
774 [renderer stopRequestingMediaData];
775 [m_errorListener stopObservingRenderer:renderer.get()];
778 m_audioRenderers.clear();
// Full detach from the owning media source (parser + renderers + removal).
781 void SourceBufferPrivateAVFObjC::removedFromMediaSource()
787 m_mediaSource->removeSourceBuffer(this);
// Ready state is delegated to the player; HaveNothing when detached.
790 MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
792 return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
795 void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
798 m_mediaSource->player()->setReadyState(readyState);
// hasVideo/hasAudio ask the client about buffered content; hasSelectedVideo
// means a video track is currently enabled (-1 is the "none" sentinel).
801 bool SourceBufferPrivateAVFObjC::hasVideo() const
803 return m_client && m_client->sourceBufferPrivateHasVideo();
806 bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
808 return m_enabledVideoTrackID != -1;
811 bool SourceBufferPrivateAVFObjC::hasAudio() const
813 return m_client && m_client->sourceBufferPrivateHasAudio();
// Video track selection changed: toggles parser sample delivery for the
// track and starts/stops decompression-session media requests accordingly.
816 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
818 int trackID = track->trackID();
819 if (!track->selected() && m_enabledVideoTrackID == trackID) {
820 m_enabledVideoTrackID = -1;
821 [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
823 if (m_decompressionSession)
824 m_decompressionSession->stopRequestingMediaData();
825 } else if (track->selected()) {
826 m_enabledVideoTrackID = trackID;
827 [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
829 if (m_decompressionSession) {
830 m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
831 didBecomeReadyForMoreSamples(trackID);
836 m_mediaSource->hasSelectedVideoChanged(*this);
// Audio track enable/disable: removes the track's renderer from the player
// when disabled; when enabled, lazily creates (and begins error-observing)
// an AVSampleBufferAudioRenderer keyed by trackID and re-adds it.
839 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
841 int trackID = track->trackID();
843 if (!track->enabled()) {
844 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
845 RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
846 ALLOW_NEW_API_WITHOUT_GUARDS_END
847 [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
849 m_mediaSource->player()->removeAudioRenderer(renderer.get());
851 [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
852 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
853 RetainPtr<AVSampleBufferAudioRenderer> renderer;
854 ALLOW_NEW_API_WITHOUT_GUARDS_END
855 if (!m_audioRenderers.contains(trackID)) {
856 renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
// weakThis guards the ready callback against this object's destruction.
857 auto weakThis = createWeakPtr();
858 [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
860 weakThis->didBecomeReadyForMoreSamples(trackID);
862 m_audioRenderers.set(trackID, renderer);
863 [m_errorListener beginObservingRenderer:renderer.get()];
865 renderer = m_audioRenderers.get(trackID);
868 m_mediaSource->player()->addAudioRenderer(renderer.get());
// Legacy-EME session swap: moves this buffer between sessions, wakes the
// parser queue if it was waiting for a session, and replays any stored HDCP
// error to the new session asynchronously.
872 void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
874 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
875 if (session == m_session)
879 m_session->removeSourceBuffer(this);
884 m_session->addSourceBuffer(this);
885 if (m_hasSessionSemaphore) {
886 m_hasSessionSemaphore->signal();
887 m_hasSessionSemaphore = nullptr;
891 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
892 callOnMainThread([weakThis] {
893 if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
896 bool ignored = false;
897 weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
902 UNUSED_PARAM(session);
// Modern-EME instance swap: moves the parser between content key sessions
// (remove from old, add to new) and wakes any waiting parser-queue thread.
906 void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
908 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
909 auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
910 if (!fpsInstance || fpsInstance == m_cdmInstance)
914 [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
916 m_cdmInstance = fpsInstance;
919 [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_parser.get()];
920 if (m_hasSessionSemaphore) {
921 m_hasSessionSemaphore->signal();
922 m_hasSessionSemaphore = nullptr;
926 UNUSED_PARAM(instance);
// Flushes video plus every audio renderer (per-renderer flush overload
// below at original line 1037).
930 void SourceBufferPrivateAVFObjC::flush()
934 for (auto& renderer : m_audioRenderers.values())
935 flush(renderer.get());
// Error-client registration; register/unregister must be balanced (asserts).
938 void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
940 ASSERT(!m_errorClients.contains(client));
941 m_errorClients.append(client);
944 void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
946 ASSERT(m_errorClients.contains(client));
947 m_errorClients.remove(m_errorClients.find(client));
950 void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
952 LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);
954 // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
955 bool anyIgnored = false;
956 for (auto& client : m_errorClients) {
957 bool shouldIgnore = false;
958 client->layerDidReceiveError(layer, error, shouldIgnore);
959 anyIgnored |= shouldIgnore;
964 int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];
967 m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
970 void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
972 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
973 if (m_mediaSource->player()->cdmInstance()) {
974 m_mediaSource->player()->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
978 UNUSED_PARAM(obscured);
981 RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
982 layerDidReceiveError(m_displayLayer.get(), error.get());
// Called when an AVSampleBufferAudioRenderer reports an error. 'HDCP'
// errors are detected specially (the handling between the check and
// the client loop is elided in this view — confirm against the full
// file). All registered error clients are then notified and may mark
// the error as ignorable.
985 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
986 void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
987 ALLOW_NEW_API_WITHOUT_GUARDS_END
989 LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);
// Matches the four-character code used by
// outputObscuredDueToInsufficientExternalProtectionChanged() above.
991 if ([error code] == 'HDCP')
994 // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
995 bool anyIgnored = false;
996 for (auto& client : m_errorClients) {
997 bool shouldIgnore = false;
998 client->rendererDidReceiveError(renderer, error, shouldIgnore);
999 anyIgnored |= shouldIgnore;
// Flushes the pipeline for a single track, identified by its stringified
// integer track ID. The enabled video track takes the video path (body
// elided here — presumably flushVideo(); confirm); audio tracks flush
// their associated AVSampleBufferAudioRenderer.
1005 void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
1007 int trackID = trackIDString.toInt();
1008 LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);
1010 if (trackID == m_enabledVideoTrackID) {
1012 } else if (m_audioRenderers.contains(trackID))
1013 flush(m_audioRenderers.get(trackID).get());
// Flushes the video pipeline: the display layer, and the decompression
// session if one is active. After a flush the cached natural size is
// invalidated and the player is told that no decoded video frame is
// currently available, with any pending size changes flushed.
1016 void SourceBufferPrivateAVFObjC::flushVideo()
1018 [m_displayLayer flush];
1020 if (m_decompressionSession) {
1021 m_decompressionSession->flush();
// Re-arm the "has available frame" notification so the player learns
// when the first post-flush frame is decoded; guarded by a weak pointer
// because the callback may outlive this object.
1022 m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
1023 if (weakThis && weakThis->m_mediaSource)
1024 weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
// The natural size is derived from enqueued samples (see
// enqueueSample), so it is stale after a flush.
1028 m_cachedSize = std::nullopt;
1030 if (m_mediaSource) {
1031 m_mediaSource->player()->setHasAvailableVideoFrame(false);
1032 m_mediaSource->player()->flushPendingSizeChanges();
// Flushes a single audio renderer and marks it as having no available
// audio sample. (The renderer's own -flush call is elided in this view —
// confirm against the full file.)
1036 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
1037 void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
1038 ALLOW_NEW_API_WITHOUT_GUARDS_END
1043 m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
// Enqueues a decoded-ready MediaSample onto the appropriate AVFoundation
// pipeline for the given track. Video samples go to the decompression
// session and/or display layer; audio samples go to the track's
// AVSampleBufferAudioRenderer. Only CMSampleBuffer-backed samples are
// supported; samples for unknown tracks are dropped (early returns for
// both guards are elided in this view).
1046 void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
1048 int trackID = trackIDString.toInt();
1049 if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
1052 PlatformSample platformSample = sample->platformSample();
1053 if (platformSample.type != PlatformSample::CMSampleBufferType)
1056 LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());
1058 if (trackID == m_enabledVideoTrackID) {
// Detect presentation-size changes from the sample's format
// description so the player's natural size tracks the stream.
1059 CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
1060 FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
1061 if (!m_cachedSize || formatSize != m_cachedSize.value()) {
1062 LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", formatSize.width(), formatSize.height());
1063 bool sizeWasNull = !m_cachedSize;
1064 m_cachedSize = formatSize;
1065 if (m_mediaSource) {
// First size is applied immediately; subsequent changes are
// scheduled at the sample's presentation time so they take
// effect in sync with playback.
1067 m_mediaSource->player()->setNaturalSize(formatSize);
1069 m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
1073 if (m_decompressionSession)
1074 m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);
1076 if (m_displayLayer) {
// Until the first displayable frame is reported, attach a
// consumed-notification attachment to a copy of the sample so
// bufferWasConsumed() fires when the layer actually displays it.
// A copy is used because the attachment must not propagate to
// other consumers of the original buffer.
1077 if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
1078 auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
1079 CMSampleBufferRef rawSampleCopy;
1080 CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
1081 auto sampleCopy = adoptCF(rawSampleCopy);
1082 CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
1083 [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
1085 [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
// Audio path: hand the buffer to the track's renderer and note that a
// displayable audio sample is now available.
1088 auto renderer = m_audioRenderers.get(trackID);
1089 [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
1090 if (m_mediaSource && !sample->isNonDisplaying())
1091 m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
// Invoked (via the kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed
// attachment set in enqueueSample) when the display layer has consumed a
// sample; tells the player a decoded video frame is now available.
1095 void SourceBufferPrivateAVFObjC::bufferWasConsumed()
1098 m_mediaSource->player()->setHasAvailableVideoFrame(true);
// Returns whether the pipeline for the given track can accept more
// samples. Video prefers the decompression session's readiness when one
// exists, otherwise the display layer's; audio asks the track's
// renderer. (The fall-through return for unknown tracks is elided in
// this view.)
1101 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
1103 int trackID = trackIDString.toInt();
1104 if (trackID == m_enabledVideoTrackID) {
1105 if (m_decompressionSession)
1106 return m_decompressionSession->isReadyForMoreMediaData();
1108 return [m_displayLayer isReadyForMoreMediaData];
1111 if (m_audioRenderers.contains(trackID))
1112 return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
// Propagates this source buffer's active state to the owning media
// source (which tracks the set of active source buffers).
1117 void SourceBufferPrivateAVFObjC::setActive(bool isActive)
1120 m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
// Delegates to the client to find the best seekable time near |time|,
// within the given negative/positive thresholds (e.g. the nearest sync
// sample the client knows about).
1123 MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
1127 return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold);
// Notification that a seek is about to begin. Body is elided in this
// view — presumably flushes pending samples ahead of the seek; confirm
// against the full file.
1130 void SourceBufferPrivateAVFObjC::willSeek()
// Forwards a seek request to the client, which performs the actual
// sample-level seek bookkeeping.
1135 void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
1138 m_client->sourceBufferPrivateSeekToTime(time);
// Returns the presentation size cached from the most recently enqueued
// video sample (see enqueueSample), or an empty FloatSize if no video
// sample has been observed since the last flush.
1141 FloatSize SourceBufferPrivateAVFObjC::naturalSize()
1143 return m_cachedSize.value_or(FloatSize());
// Called when the pipeline for |trackID| signals it can accept more
// data. Cancels the outstanding request-media-data callbacks for that
// track (they are one-shot notifications re-armed by
// notifyClientWhenReadyForMoreSamples) and tells the client the track
// is ready again, using the stringified track ID the client expects.
1146 void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
1148 LOG(Media, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);
1149 if (trackID == m_enabledVideoTrackID) {
1150 if (m_decompressionSession)
1151 m_decompressionSession->stopRequestingMediaData();
1152 [m_displayLayer stopRequestingMediaData];
1153 } else if (m_audioRenderers.contains(trackID))
1154 [m_audioRenderers.get(trackID) stopRequestingMediaData];
1159 m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
// Arms a one-shot "ready for more data" notification on the pipeline
// object(s) for the given track; when it fires, didBecomeReadyForMoreSamples()
// relays readiness to the client. The ObjC blocks capture a WeakPtr
// because they are dispatched asynchronously on the main queue and may
// outlive this object (the weakThis null-checks inside the blocks are
// elided in this view — confirm against the full file).
1162 void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
1164 int trackID = trackIDString.toInt();
1165 if (trackID == m_enabledVideoTrackID) {
1166 if (m_decompressionSession) {
1167 m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
1168 didBecomeReadyForMoreSamples(trackID);
1171 if (m_displayLayer) {
1172 auto weakThis = createWeakPtr();
1173 [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1175 weakThis->didBecomeReadyForMoreSamples(trackID);
1178 } else if (m_audioRenderers.contains(trackID)) {
1179 auto weakThis = createWeakPtr();
1180 [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1182 weakThis->didBecomeReadyForMoreSamples(trackID);
// Returns whether this source buffer could switch to the given content
// type, by asking the media-source-capable player backend whether it
// supports the type at all.
1187 bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
1189 MediaEngineSupportParameters parameters;
1190 parameters.isMediaSource = true;
1191 parameters.type = contentType;
1192 return MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters) != MediaPlayer::IsNotSupported;
// Attaches (or detaches, when |layer| is nil) the AVSampleBufferDisplayLayer
// used for video output. The old layer is flushed and its error
// observation torn down; the new layer gets a ready-for-data callback
// (guarded by a WeakPtr since the block runs asynchronously on the main
// queue), error observation, and a request to the client to re-enqueue
// samples for the enabled video track so the new layer is populated.
1195 void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
1197 if (layer == m_displayLayer)
// A display layer should only coexist with a decompression session when
// a video track is actually selected.
1200 ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());
1202 if (m_displayLayer) {
1203 [m_displayLayer flush];
1204 [m_displayLayer stopRequestingMediaData];
1205 [m_errorListener stopObservingLayer:m_displayLayer.get()];
1208 m_displayLayer = layer;
1210 if (m_displayLayer) {
1211 auto weakThis = createWeakPtr();
1212 [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1214 weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
1216 [m_errorListener beginObservingLayer:m_displayLayer.get()];
1218 m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
// Attaches (or detaches, when null) the WebCoreDecompressionSession used
// to decode video without a display layer. The old session is stopped
// and invalidated; the new session is armed with ready-for-data and
// frame-available callbacks (WeakPtr-guarded — they may fire after this
// object is destroyed), and the client is asked to re-enqueue samples
// for the enabled video track so the session is populated.
1222 void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
1224 if (m_decompressionSession == decompressionSession)
1227 if (m_decompressionSession) {
1228 m_decompressionSession->stopRequestingMediaData();
1229 m_decompressionSession->invalidate();
1232 m_decompressionSession = decompressionSession;
1234 if (!m_decompressionSession)
1237 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
1238 m_decompressionSession->requestMediaDataWhenReady([weakThis] {
1240 weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
1242 m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
1243 if (weakThis && weakThis->m_mediaSource)
1244 weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
1247 m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
1252 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)