2 * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #import "SourceBufferPrivateAVFObjC.h"
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
34 #import "CDMSessionAVContentKeySession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "MediaDescription.h"
39 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
40 #import "MediaSample.h"
41 #import "MediaSampleAVFObjC.h"
42 #import "MediaSourcePrivateAVFObjC.h"
43 #import "NotImplemented.h"
44 #import "SharedBuffer.h"
45 #import "SourceBufferPrivateClient.h"
46 #import "TimeRanges.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "WebCoreDecompressionSession.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <JavaScriptCore/TypedArrayInlines.h>
51 #import <QuartzCore/CALayer.h>
52 #import <objc/runtime.h>
53 #import <pal/avfoundation/MediaTimeAVFoundation.h>
54 #import <pal/spi/mac/AVFoundationSPI.h>
55 #import <wtf/BlockObjCExceptions.h>
56 #import <wtf/HashCountedSet.h>
57 #import <wtf/MainThread.h>
58 #import <wtf/SoftLinking.h>
59 #import <wtf/WeakPtr.h>
60 #import <wtf/text/AtomicString.h>
61 #import <wtf/text/CString.h>
63 #pragma mark - Soft Linking
65 #import <pal/cf/CoreMediaSoftLink.h>
// Soft-link AVFoundation classes and constants at runtime so WebCore avoids a
// hard link-time dependency on the framework (standard WebKit SOFT_LINK pattern).
// NOTE(review): this extraction is missing physical lines (gaps in the embedded
// numbering); code below is kept byte-identical.
67 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
69 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
70 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
71 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
72 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
73 ALLOW_NEW_API_WITHOUT_GUARDS_END
74 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
75 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
77 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
// Remap the constant names onto their soft-linked getters so the rest of this
// file can use the AVFoundation spellings directly.
83 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
84 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
86 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
87 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
88 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
91 #pragma mark AVStreamSession
// Local redeclaration of the (SPI) AVStreamSession methods this file calls;
// the real class comes from AVFoundation via soft linking.
// NOTE(review): the closing @end appears to be among the lines missing from
// this extraction.
93 @interface AVStreamSession : NSObject
94 - (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
95 - (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
99 #pragma mark WebAVStreamDataParserListener
// Bridges AVStreamDataParser delegate callbacks (delivered on a background
// parsing queue) back to the owning C++ SourceBufferPrivateAVFObjC via a
// WeakPtr, so callbacks after destruction become no-ops.
101 @interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
102 WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
// Semaphore signaled by SourceBufferPrivateAVFObjC::abort() to unblock
// synchronous waits in the delegate callbacks below.
103 OSObjectPtr<dispatch_semaphore_t> _abortSemaphore;
104 AVStreamDataParser* _parser;
106 @property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
107 @property (assign) OSObjectPtr<dispatch_semaphore_t> abortSemaphore;
108 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
// NOTE(review): many physical lines (method braces, early returns, `@end`,
// block-closing `});` lines) are missing from this extraction; visible code is
// kept byte-identical.
111 @implementation WebAVStreamDataParserListener
112 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
// Register self as the parser's delegate so the callbacks below fire.
121 [_parser setDelegate:self];
125 @synthesize parent=_parent;
126 @synthesize abortSemaphore=_abortSemaphore;
// Delegate is cleared on teardown/invalidate so no callbacks arrive afterward.
130 [_parser setDelegate:nil];
136 [_parser setDelegate:nil];
// Parser produced an AVAsset for an initialization segment; retain it and
// forward to the parent on the main thread.
140 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
142 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
144 RetainPtr<AVAsset*> protectedAsset = asset;
145 callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
147 parent->didParseStreamDataAsAsset(protectedAsset.get());
// Same as above; the discontinuity flag is ignored.
151 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
153 UNUSED_PARAM(discontinuity);
154 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
156 RetainPtr<AVAsset*> protectedAsset = asset;
157 callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
159 parent->didParseStreamDataAsAsset(protectedAsset.get());
// Parse failure: forward the NSError to the parent on the main thread.
163 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
165 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
167 RetainPtr<NSError> protectedError = error;
168 callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
170 parent->didFailToParseStreamDataWithError(protectedError.get());
// A media sample was demuxed; retain the CMSampleBuffer and hand it to the
// parent on the main thread along with track id, media type and flags.
174 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
176 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
178 RetainPtr<CMSampleBufferRef> protectedSample = sample;
179 callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
181 parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
185 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
187 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
189 callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
191 parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
195 - (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
197 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
199 // We must call synchronously to the main thread, as the AVStreamSession must be associated
200 // with the streamDataParser before the delegate method returns.
201 OSObjectPtr<dispatch_semaphore_t> respondedSemaphore = adoptOSObject(dispatch_semaphore_create(0));
202 callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
204 parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
205 dispatch_semaphore_signal(respondedSemaphore.get());
// Poll in 100ms slices, alternating between "main thread responded" and
// "abort requested" so an abort() can unblock this background-queue wait.
// The abort semaphore is re-signaled so subsequent waits also fall through.
209 if (!dispatch_semaphore_wait(respondedSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100)))
212 if (!dispatch_semaphore_wait(_abortSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100))) {
213 dispatch_semaphore_signal(_abortSemaphore.get());
// Encrypted init data arrived; block this queue (same polling pattern as
// above) until a CDM session is attached on the main thread or abort fires.
219 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
221 ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
223 OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore = adoptOSObject(dispatch_semaphore_create(0));
224 callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
226 parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
230 if (!dispatch_semaphore_wait(hasSessionSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100)))
233 if (!dispatch_semaphore_wait(_abortSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100))) {
234 dispatch_semaphore_signal(_abortSemaphore.get());
// KVO/notification listener that watches AVSampleBufferDisplayLayers and
// AVSampleBufferAudioRenderers for errors and reports them to the owning
// SourceBufferPrivateAVFObjC. _parent is a raw pointer here (not a WeakPtr);
// it is presumably cleared via an invalidate-style call — the relevant lines
// are missing from this extraction.
241 @interface WebAVSampleBufferErrorListener : NSObject {
242 WebCore::SourceBufferPrivateAVFObjC* _parent;
243 Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
244 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
245 Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
246 ALLOW_NEW_API_WITHOUT_GUARDS_END
249 - (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
251 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
252 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
253 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
254 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
255 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
256 ALLOW_NEW_API_WITHOUT_GUARDS_END
// NOTE(review): braces, early returns and `@end` lines are missing from this
// extraction; visible code is kept byte-identical.
259 @implementation WebAVSampleBufferErrorListener
261 - (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
263 if (!(self = [super init]))
// Teardown/invalidate path: unregister every KVO observation and the
// notification-center registration added in the begin-observing methods.
278 if (!_parent && !_layers.size() && !_renderers.size())
281 for (auto& layer : _layers) {
282 [layer removeObserver:self forKeyPath:@"error"];
283 [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
287 for (auto& renderer : _renderers)
288 [renderer removeObserver:self forKeyPath:@"error"];
291 [[NSNotificationCenter defaultCenter] removeObserver:self];
// Start watching a display layer: KVO on "error" and the HDCP-style
// "outputObscuredDueToInsufficientExternalProtection" key, plus the
// failed-to-decode notification.
296 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
299 ASSERT(!_layers.contains(layer));
301 _layers.append(layer);
302 [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
303 [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
304 [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
// Mirror of beginObservingLayer: — must stay in sync with the keys added there.
307 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
310 ASSERT(_layers.contains(layer));
312 [layer removeObserver:self forKeyPath:@"error"];
313 [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
314 _layers.remove(_layers.find(layer));
316 [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
// Audio renderers only need the "error" key observed.
319 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
320 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
322 ALLOW_NEW_API_WITHOUT_GUARDS_END
324 ASSERT(!_renderers.contains(renderer));
326 _renderers.append(renderer);
327 [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
330 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
331 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
332 ALLOW_NEW_API_WITHOUT_GUARDS_END
335 ASSERT(_renderers.contains(renderer));
337 [renderer removeObserver:self forKeyPath:@"error"];
338 _renderers.remove(_renderers.find(renderer));
// KVO callback (may arrive on a non-main thread): dispatch the error to the
// parent on the main thread, keeping self and the observed object alive via
// RetainPtr captures.
341 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
343 UNUSED_PARAM(context);
344 UNUSED_PARAM(keyPath);
347 RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
348 if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
349 RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
350 ASSERT(_layers.contains(layer.get()));
352 if ([keyPath isEqualTo:@"error"]) {
353 RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
354 callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
355 protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
357 } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
358 callOnMainThread([protectedSelf = WTFMove(protectedSelf), obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
359 protectedSelf->_parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
362 ASSERT_NOT_REACHED();
364 } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
365 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
366 RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
367 ALLOW_NEW_API_WITHOUT_GUARDS_END
368 RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
370 ASSERT(_renderers.contains(renderer.get()));
371 ASSERT([keyPath isEqualTo:@"error"]);
373 callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
374 protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
377 ASSERT_NOT_REACHED();
// Decode failure notification: extract layer and error and forward on the
// main thread, re-checking parent/layer validity there.
380 - (void)layerFailedToDecode:(NSNotification*)note
382 RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
383 RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];
385 RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
386 callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
387 if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
389 protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
// Small context object stored in the payload of the CoreMedia
// "buffer consumed" notification; wraps a WeakPtr back to the owning
// SourceBufferPrivateAVFObjC (see bufferWasConsumedCallback below).
396 @interface WebBufferConsumedContext : NSObject {
397 WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
399 @property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
402 @implementation WebBufferConsumedContext
403 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
// Returns null if the parent has already been destroyed.
412 - (WebCore::SourceBufferPrivateAVFObjC*)parent
414 return _parent.get();
422 #pragma mark MediaDescriptionAVFObjC
// MediaDescription backed by an AVAssetTrack: captures the track's
// visual/audible/legible characteristics and its codec FourCC at
// construction time.
424 class MediaDescriptionAVFObjC final : public MediaDescription {
426 static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
427 virtual ~MediaDescriptionAVFObjC() { }
429 AtomicString codec() const override { return m_codec; }
430 bool isVideo() const override { return m_isVideo; }
431 bool isAudio() const override { return m_isAudio; }
432 bool isText() const override { return m_isText; }
435 MediaDescriptionAVFObjC(AVAssetTrack* track)
436 : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
437 , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
438 , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
// Derive the codec string from the first format description's media
// subtype (a FourCC, reinterpreted as 4 Latin-1 characters).
440 NSArray* formatDescriptions = [track formatDescriptions];
441 CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
443 FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
444 m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
448 AtomicString m_codec;
455 #pragma mark SourceBufferPrivateAVFObjC
// Key under which a WebBufferConsumedContext is stored in the notification
// payload dictionary.
457 static NSString *kBufferConsumedContext = @"BufferConsumedContext";
// CMNotificationCenter callback for kCMSampleBufferConsumerNotification_BufferConsumed.
// May fire on any thread; re-dispatches itself to the main thread, then
// resolves the WebBufferConsumedContext from the payload and notifies the
// source buffer that its sample was consumed.
459 static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
461 if (!isMainThread()) {
462 callOnMainThread([notificationName, payload = retainPtr(payload)] {
463 bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
468 if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
471 ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
472 WebBufferConsumedContext *context = [(__bridge NSDictionary *)payload valueForKey:kBufferConsumedContext];
// context.parent is null if the source buffer was already destroyed.
476 if (auto sourceBuffer = context.parent)
477 sourceBuffer->bufferWasConsumed();
480 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
482 return adoptRef(new SourceBufferPrivateAVFObjC(parent));
// Constructor: creates the stream data parser, its ObjC delegate listener,
// the error listener, and the dispatch group used to track in-flight appends;
// registers for the CoreMedia buffer-consumed notification.
485 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
486 : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
487 , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
488 , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
489 , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
490 , m_mediaSource(parent)
492 CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
493 m_delegate.get().abortSemaphore = adoptOSObject(dispatch_semaphore_create(0));
// Destructor: unregister the CoreMedia listener and release any parser-queue
// thread still blocked waiting for a CDM session.
496 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
502 CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
504 if (m_hasSessionSemaphore)
505 dispatch_semaphore_signal(m_hasSessionSemaphore.get());
// An initialization segment was parsed into an AVAsset (main thread).
// Validates hardware-decode requirements, rebuilds the video/audio track
// lists, and reports an InitializationSegment to the client.
// NOTE(review): several lines (braces, early returns, m_asset assignment) are
// missing from this extraction; visible code kept byte-identical.
508 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
510 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
// Fail the append if any track requires hardware decode support that is
// not available for its content type.
515 if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
516 for (AVAssetTrack *track in [asset tracks]) {
517 if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
518 m_parsingSucceeded = false;
// A new init segment replaces the previous track set and ends any
// discard-until-next-init-segment state set by resetParserState().
526 m_videoTracks.clear();
527 m_audioTracks.clear();
529 m_discardSamplesUntilNextInitializationSegment = false;
531 SourceBufferPrivateClient::InitializationSegment segment;
// Prefer overallDurationHint when available and valid; otherwise fall back
// to the asset's duration.
533 if ([m_asset respondsToSelector:@selector(overallDurationHint)])
534 segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);
536 if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
537 segment.duration = PAL::toMediaTime([m_asset duration]);
539 for (AVAssetTrack* track in [m_asset tracks]) {
540 if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
541 // FIXME(125161): Handle in-band text tracks.
545 if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
546 SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
547 RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
548 info.track = videoTrack;
549 m_videoTracks.append(videoTrack);
550 info.description = MediaDescriptionAVFObjC::create(track);
551 segment.videoTracks.append(info);
552 } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
553 SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
554 RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
555 info.track = audioTrack;
556 m_audioTracks.append(audioTrack);
557 info.description = MediaDescriptionAVFObjC::create(track);
558 segment.audioTracks.append(info);
561 // FIXME(125161): Add TextTrack support
565 m_mediaSource->player()->characteristicsChanged();
568 m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
// Parser reported a failure (main thread): record it so appendCompleted()
// reports ParsingFailed to the client.
571 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
576 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
578 m_parsingSucceeded = false;
// NOTE(review): this struct holds a reference member (const String&); its
// only visible use context is missing from this extraction.
581 struct ProcessCodedFrameInfo {
582 SourceBufferPrivateAVFObjC* sourceBuffer;
584 const String& mediaType;
// Entry point for demuxed samples (main thread); the flags argument is unused.
587 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
589 processCodedFrame(trackID, sampleBuffer, mediaType);
// Wraps the sample in a MediaSampleAVFObjC and forwards it to the client,
// dropping samples for tracks we do not handle (e.g. text) and samples
// arriving before the next init segment after a parser reset.
592 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
594 if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
595 // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
596 // will just confuse its state. Drop this sample until we can handle text tracks properly.
600 if (m_discardSamplesUntilNextInitializationSegment)
604 Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
605 LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
606 m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
// End-of-track callback — body not visible in this extraction.
612 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
// Called synchronously (via semaphore in the delegate) before encrypted init
// data is delivered: attaches the parser to the existing CDM session, or to
// the player's AVStreamSession when no AVContentKeySession is available.
617 void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
624 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
625 LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
626 m_protectedTrackID = trackID;
628 if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
629 session->addParser(m_parser.get());
630 else if (!CDMSessionAVContentKeySession::isAvailable()) {
631 BEGIN_BLOCK_OBJC_EXCEPTIONS;
632 [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
633 END_BLOCK_OBJC_EXCEPTIONS;
636 UNUSED_PARAM(trackID);
// Encrypted init data arrived (main thread). For legacy EME: surfaces a
// key-needed event and either attaches the parser to an existing CDM session
// (signaling the parser queue immediately) or stashes the semaphore in
// m_hasSessionSemaphore to be signaled once a session is set. For modern EME:
// reports "sinf" initialization data to the player.
640 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore)
645 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
646 LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
647 m_protectedTrackID = trackID;
648 RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
649 [initData getBytes:initDataArray->data() length:initDataArray->length()];
650 m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
651 if (auto session = m_mediaSource->player()->cdmSession()) {
652 session->addParser(m_parser.get());
653 dispatch_semaphore_signal(hasSessionSemaphore.get());
// Replacing a previously stashed semaphore: signal the old one first so no
// parser-queue wait is left stranded.
655 if (m_hasSessionSemaphore)
656 dispatch_semaphore_signal(m_hasSessionSemaphore.get());
657 m_hasSessionSemaphore = hasSessionSemaphore;
661 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
663 auto initDataBuffer = SharedBuffer::create(initData);
664 m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
668 UNUSED_PARAM(initData);
669 UNUSED_PARAM(trackID);
670 UNUSED_PARAM(hasSessionSemaphore);
// Stores the client that receives init-segment/sample/append callbacks —
// body not visible in this extraction.
673 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
// Lazily-created concurrent queue shared by all source buffers for
// AVStreamDataParser append work.
678 static dispatch_queue_t globalDataParserQueue()
680 static dispatch_queue_t globalQueue;
681 static dispatch_once_t onceToken;
682 dispatch_once(&onceToken, ^{
683 globalQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
// Queues the media data for parsing on the shared parser queue. The appended
// bytes are copied into an NSData; the append is tracked in m_isAppendingGroup
// so abort()/teardown can wait for it, and appendCompleted() runs on the main
// thread afterwards.
688 void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
690 LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data.data(), data.size());
692 // FIXME: Avoid the data copy by wrapping around the Vector<> object.
693 RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
694 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
695 RetainPtr<AVStreamDataParser> parser = m_parser;
696 RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
698 m_parsingSucceeded = true;
699 dispatch_group_enter(m_isAppendingGroup.get());
// After resetParserState(), the first append is marked as a discontinuity.
701 dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
702 if (parserStateWasReset)
703 [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
705 [parser appendStreamData:nsData.get()];
707 callOnMainThread([weakThis] {
709 weakThis->appendCompleted();
711 dispatch_group_leave(isAppendingGroup.get());
713 m_parserStateWasReset = false;
// Main-thread completion: reports progress to the player and the append
// outcome (success or parse failure) to the client.
716 void SourceBufferPrivateAVFObjC::appendCompleted()
718 if (m_parsingSucceeded && m_mediaSource)
719 m_mediaSource->player()->setLoadingProgresssed(true);
722 m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
// Cancels outstanding appends: releases any parser-queue waits (session and
// abort semaphores), waits for in-flight append work to drain, then revokes
// append WeakPtrs and re-arms the delegate with fresh parent/semaphore state.
725 void SourceBufferPrivateAVFObjC::abort()
727 // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
728 // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
729 // semaphore, the m_isAppendingGroup wait operation will deadlock.
730 if (m_hasSessionSemaphore)
731 dispatch_semaphore_signal(m_hasSessionSemaphore.get());
732 dispatch_semaphore_signal(m_delegate.get().abortSemaphore.get());
733 dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
734 m_appendWeakFactory.revokeAll();
735 m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
736 m_delegate.get().abortSemaphore = adoptOSObject(dispatch_semaphore_create(0));
// Marks the parser state reset: the next append carries the discontinuity
// flag, and incoming samples are dropped until the next init segment.
739 void SourceBufferPrivateAVFObjC::resetParserState()
741 m_parserStateWasReset = true;
742 m_discardSamplesUntilNextInitializationSegment = true;
// Detaches the parser from any CDM machinery (legacy stream session or
// modern content key session) and invalidates the delegate so no further
// parser callbacks arrive.
745 void SourceBufferPrivateAVFObjC::destroyParser()
747 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
748 if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
749 [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
751 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
753 [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
756 [m_delegate invalidate];
757 m_delegate = nullptr;
// Tears down the display layer, decompression session and all audio
// renderers, detaching each renderer from the player and the error listener.
761 void SourceBufferPrivateAVFObjC::destroyRenderers()
764 setVideoLayer(nullptr);
766 if (m_decompressionSession)
767 setDecompressionSession(nullptr);
769 for (auto& renderer : m_audioRenderers.values()) {
771 m_mediaSource->player()->removeAudioRenderer(renderer.get());
773 [renderer stopRequestingMediaData];
774 [m_errorListener stopObservingRenderer:renderer.get()];
777 m_audioRenderers.clear();
780 void SourceBufferPrivateAVFObjC::removedFromMediaSource()
786 m_mediaSource->removeSourceBuffer(this);
// Ready state is delegated to the player; HaveNothing when detached.
789 MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
791 return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
794 void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
797 m_mediaSource->player()->setReadyState(readyState);
800 bool SourceBufferPrivateAVFObjC::hasVideo() const
802 return m_client && m_client->sourceBufferPrivateHasVideo();
// -1 is the sentinel for "no enabled video track".
805 bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
807 return m_enabledVideoTrackID != -1;
810 bool SourceBufferPrivateAVFObjC::hasAudio() const
812 return m_client && m_client->sourceBufferPrivateHasAudio();
// Video track selection changed: toggle parser media delivery for the track,
// start/stop decompression-session data requests, and tell the media source
// the selected-video state may have changed.
815 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
817 int trackID = track->trackID();
818 if (!track->selected() && m_enabledVideoTrackID == trackID) {
819 m_enabledVideoTrackID = -1;
820 [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
822 if (m_decompressionSession)
823 m_decompressionSession->stopRequestingMediaData();
824 } else if (track->selected()) {
825 m_enabledVideoTrackID = trackID;
826 [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
828 if (m_decompressionSession) {
829 m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
830 didBecomeReadyForMoreSamples(trackID);
835 m_mediaSource->hasSelectedVideoChanged(*this);
// Audio track enabled-state changed: on disable, stop parser delivery and
// detach the track's renderer from the player; on enable, create (or reuse)
// an AVSampleBufferAudioRenderer for the track, wire its ready-for-data
// callback through a WeakPtr, and attach it to the player.
838 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
840 int trackID = track->trackID();
842 if (!track->enabled()) {
843 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
844 RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
845 ALLOW_NEW_API_WITHOUT_GUARDS_END
846 [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
848 m_mediaSource->player()->removeAudioRenderer(renderer.get());
850 [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
851 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
852 RetainPtr<AVSampleBufferAudioRenderer> renderer;
853 ALLOW_NEW_API_WITHOUT_GUARDS_END
854 if (!m_audioRenderers.contains(trackID)) {
855 renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
856 auto weakThis = createWeakPtr();
857 [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
859 weakThis->didBecomeReadyForMoreSamples(trackID);
861 m_audioRenderers.set(trackID, renderer);
862 [m_errorListener beginObservingRenderer:renderer.get()];
864 renderer = m_audioRenderers.get(trackID);
867 m_mediaSource->player()->addAudioRenderer(renderer.get());
// Legacy EME: swap the CDM session this buffer is registered with. Signals
// m_hasSessionSemaphore so a parser-queue thread blocked waiting for a
// session can proceed, and replays any pending HDCP error to the new session
// asynchronously.
871 void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
873 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
874 if (session == m_session)
878 m_session->removeSourceBuffer(this);
883 m_session->addSourceBuffer(this);
884 if (m_hasSessionSemaphore) {
885 dispatch_semaphore_signal(m_hasSessionSemaphore.get());
886 m_hasSessionSemaphore = nullptr;
// Deferred so the session is fully attached before receiving the error.
890 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
891 callOnMainThread([weakThis] {
892 if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
895 bool ignored = false;
896 weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
901 UNUSED_PARAM(session);
// Modern EME: swap the FairPlay CDM instance, moving the parser between the
// old and new content key sessions, and release any parser-queue thread
// waiting on m_hasSessionSemaphore.
905 void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
907 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
908 auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
909 if (!fpsInstance || fpsInstance == m_cdmInstance)
913 [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
915 m_cdmInstance = fpsInstance;
918 [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_parser.get()];
919 if (m_hasSessionSemaphore) {
920 dispatch_semaphore_signal(m_hasSessionSemaphore.get());
921 m_hasSessionSemaphore = nullptr;
925 UNUSED_PARAM(instance);
// Flushes all renderers (video handling not visible in this extraction;
// audio renderers flushed individually below).
929 void SourceBufferPrivateAVFObjC::flush()
933 for (auto& renderer : m_audioRenderers.values())
934 flush(renderer.get());
// Error-client registry used by layerDidReceiveError/rendererDidReceiveError.
937 void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
939 ASSERT(!m_errorClients.contains(client));
940 m_errorClients.append(client);
943 void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
945 ASSERT(m_errorClients.contains(client));
946 m_errorClients.remove(m_errorClients.find(client));
// Display-layer error (main thread): offer the error to registered error
// clients (any of which may mark it ignorable), then report the OSStatus
// code to the client as a rendering error.
949 void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
951 LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);
953 // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
954 bool anyIgnored = false;
955 for (auto& client : m_errorClients) {
956 bool shouldIgnore = false;
957 client->layerDidReceiveError(layer, error, shouldIgnore);
958 anyIgnored |= shouldIgnore;
963 int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];
966 m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
// Reacts to HDCP (external output protection) state changes. With a modern
// CDM instance present, the player handles the obscured state directly;
// otherwise the change is surfaced as a synthesized 'HDCP' NSError through
// the legacy layer-error path so CDMSession clients can react.
969 void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
971 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
972 if (m_mediaSource->player()->cdmInstance()) {
973 m_mediaSource->player()->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
// Without ENCRYPTED_MEDIA support the parameter is unused on this path.
977 UNUSED_PARAM(obscured);
// Fallback: fabricate an NSError whose code is the four-char constant 'HDCP'
// and route it through the normal layer-error handling.
980 RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
981 layerDidReceiveError(m_displayLayer.get(), error.get());
// Called when an AVSampleBufferAudioRenderer reports an error. Errors with
// the synthesized 'HDCP' code are filtered here (the elided line after 990
// presumably returns early — TODO confirm); remaining errors are offered to
// the registered error clients, which may mark them ignorable.
984 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
985 void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
986 ALLOW_NEW_API_WITHOUT_GUARDS_END
988 LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);
990 if ([error code] == 'HDCP')
993 // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
994 bool anyIgnored = false;
995 for (auto& client : m_errorClients) {
996 bool shouldIgnore = false;
997 client->rendererDidReceiveError(renderer, error, shouldIgnore);
998 anyIgnored |= shouldIgnore;
// Flushes a single track identified by its stringified integer track ID:
// the enabled video track goes through the video path (body elided between
// 1009 and 1011 — presumably flushVideo(); TODO confirm), audio tracks flush
// their matching renderer. Unknown track IDs are silently ignored.
1004 void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
1006 int trackID = trackIDString.toInt();
1007 LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);
1009 if (trackID == m_enabledVideoTrackID) {
1011 } else if (m_audioRenderers.contains(trackID))
1012 flush(m_audioRenderers.get(trackID).get());
// Flushes the video pipeline: display layer, decompression session, and the
// cached natural-size state, then tells the player no frame is currently
// available and to flush any pending size changes.
1015 void SourceBufferPrivateAVFObjC::flushVideo()
1017 [m_displayLayer flush];
1019 if (m_decompressionSession) {
1020 m_decompressionSession->flush();
// Re-arm the "has a frame" notification; weakThis guards against this object
// (or its media source) being destroyed before the callback fires.
1021 m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
1022 if (weakThis && weakThis->m_mediaSource)
1023 weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
// Invalidate the cached natural size so the next enqueued sample re-derives it.
1027 m_cachedSize = std::nullopt;
1029 if (m_mediaSource) {
1030 m_mediaSource->player()->setHasAvailableVideoFrame(false);
1031 m_mediaSource->player()->flushPendingSizeChanges();
// Flushes one audio renderer and clears its "has available sample" state on
// the player. NOTE(review): lines 1038-1041 (presumably the [renderer flush]
// call and a null check) are elided from this listing — confirm before edit.
1035 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
1036 void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
1037 ALLOW_NEW_API_WITHOUT_GUARDS_END
1042 m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
// Hands a decoded-ready MediaSample to the platform renderer for its track.
// Video samples additionally drive natural-size tracking and a one-shot
// "buffer consumed" notification used to flag the first displayable frame.
1045 void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
1047 int trackID = trackIDString.toInt();
// Ignore samples for tracks we are not rendering.
1048 if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
// Only CMSampleBuffer-backed samples can be enqueued on AVFoundation renderers.
1051 PlatformSample platformSample = sample->platformSample();
1052 if (platformSample.type != PlatformSample::CMSampleBufferType)
1055 LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());
1057 if (trackID == m_enabledVideoTrackID) {
// Derive the presentation size from the sample's format description and
// propagate size changes to the player. An immediate setNaturalSize is used
// for the first size; later changes are scheduled at the sample's
// presentation time (the branch between 1064 and 1068 is partially elided).
1058 CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
1059 FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
1060 if (!m_cachedSize || formatSize != m_cachedSize.value()) {
// NOTE(review): this LOG passes three format args but omits `this` for the
// leading %p — the elided original may differ; verify the format arguments.
1061 LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", formatSize.width(), formatSize.height());
1062 bool sizeWasNull = !m_cachedSize;
1063 m_cachedSize = formatSize;
1064 if (m_mediaSource) {
1066 m_mediaSource->player()->setNaturalSize(formatSize);
1068 m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
1072 if (m_decompressionSession)
1073 m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);
1075 if (m_displayLayer) {
// Until the player has its first displayable frame, enqueue a copy carrying
// a "notify when consumed" attachment; the WebBufferConsumedContext holds a
// weak back-reference so bufferWasConsumed() can fire setHasAvailableVideoFrame.
1076 if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
1077 auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
1078 CMSampleBufferRef rawSampleCopy;
// Copy rather than mutate the caller's buffer: the attachment must not
// propagate to other consumers of the original sample.
1079 CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
1080 auto sampleCopy = adoptCF(rawSampleCopy);
1081 CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
1082 [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
// Steady state: enqueue the original buffer directly (else-branch; the
// intervening brace/else lines are elided from this listing).
1084 [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
// Audio path: enqueue on the track's renderer and mark audio available for
// any displayable sample.
1087 auto renderer = m_audioRenderers.get(trackID);
1088 [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
1089 if (m_mediaSource && !sample->isNonDisplaying())
1090 m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
// Invoked (via WebBufferConsumedContext) when the display layer consumes the
// tagged sample enqueued in enqueueSample(); marks the first video frame as
// available. A null check on m_mediaSource is presumably in the elided line
// 1096 — TODO confirm.
1094 void SourceBufferPrivateAVFObjC::bufferWasConsumed()
1097 m_mediaSource->player()->setHasAvailableVideoFrame(true);
// Returns whether the renderer for the given track can accept more samples.
// Video prefers the decompression session's readiness when one exists,
// otherwise falls back to the display layer; audio asks the track's renderer.
// The return value for unknown tracks lies in elided lines after 1111.
1100 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
1102 int trackID = trackIDString.toInt();
1103 if (trackID == m_enabledVideoTrackID) {
1104 if (m_decompressionSession)
1105 return m_decompressionSession->isReadyForMoreMediaData();
1107 return [m_displayLayer isReadyForMoreMediaData];
1110 if (m_audioRenderers.contains(trackID))
1111 return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
// Forwards active-state changes to the owning media source so it can track
// which source buffers participate in playback.
1116 void SourceBufferPrivateAVFObjC::setActive(bool isActive)
1119 m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
// Delegates fast-seek target selection (nearest sync sample within the given
// thresholds) to the client. The fallback when m_client is null lives in the
// elided lines 1123-1125 — TODO confirm.
1122 MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
1126 return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold);
// Pre-seek hook; the entire body (lines 1130-1133) is elided from this
// listing — presumably it flushes the video pipeline before the seek.
// TODO confirm against the full source.
1129 void SourceBufferPrivateAVFObjC::willSeek()
// Forwards the seek request to the client, which re-enqueues samples around
// the target time. A null check on m_client presumably occupies elided line
// 1136 — TODO confirm.
1134 void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
1137 m_client->sourceBufferPrivateSeekToTime(time);
// Returns the last video presentation size derived in enqueueSample(), or an
// empty FloatSize when no video sample has been seen since the last flush.
1140 FloatSize SourceBufferPrivateAVFObjC::naturalSize()
1142 return m_cachedSize.value_or(FloatSize());
// Callback fired by a renderer/decompression session once it can accept more
// data. Cancels the outstanding request on the relevant renderer(s), then
// notifies the client (as a stringified track ID) to resume providing samples.
1145 void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
1147 LOG(Media, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);
1148 if (trackID == m_enabledVideoTrackID) {
1149 if (m_decompressionSession)
1150 m_decompressionSession->stopRequestingMediaData();
1151 [m_displayLayer stopRequestingMediaData];
1152 } else if (m_audioRenderers.contains(trackID))
1153 [m_audioRenderers.get(trackID) stopRequestingMediaData];
// Unknown track IDs presumably return early in the elided lines 1154-1157.
1158 m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
// Arms a one-shot "ready for more data" callback on the renderer for the
// given track. Video arms the decompression session when present, otherwise
// the display layer; audio arms the track's renderer. All callbacks funnel
// into didBecomeReadyForMoreSamples() on the main queue.
1161 void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
1163 int trackID = trackIDString.toInt();
1164 if (trackID == m_enabledVideoTrackID) {
// Decompression session callbacks capture `this` directly; presumably the
// session cannot outlive this object — TODO confirm ownership.
1165 if (m_decompressionSession) {
1166 m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
1167 didBecomeReadyForMoreSamples(trackID);
// Obj-C blocks capture a WeakPtr instead; the null check on weakThis is in
// the elided line before each call (e.g. 1173, 1180).
1170 if (m_displayLayer) {
1171 auto weakThis = createWeakPtr();
1172 [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1174 weakThis->didBecomeReadyForMoreSamples(trackID);
1177 } else if (m_audioRenderers.contains(trackID)) {
1178 auto weakThis = createWeakPtr();
1179 [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1181 weakThis->didBecomeReadyForMoreSamples(trackID);
// Returns whether this source buffer could be switched to the given MIME
// content type, by asking the media-source player's type support with
// isMediaSource set (SourceBuffer.changeType() support check).
1186 bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
1188 MediaEngineSupportParameters parameters;
1189 parameters.isMediaSource = true;
1190 parameters.type = contentType;
1191 return MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters) != MediaPlayer::IsNotSupported;
// Swaps the AVSampleBufferDisplayLayer used for video output. The old layer
// is flushed, its media-data request cancelled, and error observation ended;
// the new layer is armed for data requests and error observation, and the
// client is asked to re-enqueue samples for the enabled video track.
1194 void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
1196 if (layer == m_displayLayer)
// Setting a layer only makes sense when video is selected or no
// decompression session is active.
1199 ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());
1201 if (m_displayLayer) {
1202 [m_displayLayer flush];
1203 [m_displayLayer stopRequestingMediaData];
1204 [m_errorListener stopObservingLayer:m_displayLayer.get()];
1207 m_displayLayer = layer;
1209 if (m_displayLayer) {
// Weak capture guards the async main-queue block against this object being
// destroyed; the null check is in the elided line before the call (1212).
1210 auto weakThis = createWeakPtr();
1211 [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
1213 weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
1215 [m_errorListener beginObservingLayer:m_displayLayer.get()];
1217 m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
// Swaps the WebCoreDecompressionSession used for headless video decode.
// The old session is stopped and invalidated; the new one is armed for both
// "ready for more data" and "has available frame" notifications, and the
// client re-enqueues samples for the enabled video track.
1221 void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
1223 if (m_decompressionSession == decompressionSession)
1226 if (m_decompressionSession) {
1227 m_decompressionSession->stopRequestingMediaData();
1228 m_decompressionSession->invalidate();
1231 m_decompressionSession = decompressionSession;
// Clearing the session (nullptr) ends here; nothing to arm.
1233 if (!m_decompressionSession)
// Both callbacks capture WeakPtrs so they are safe if this object dies before
// the session fires them; null checks sit in/around the elided line 1238.
1236 WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
1237 m_decompressionSession->requestMediaDataWhenReady([weakThis] {
1239 weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
1241 m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
1242 if (weakThis && weakThis->m_mediaSource)
1243 weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
1246 m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
1251 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)