Allow clients to override their own hardware media requirements where no fallback...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationSPI.h"
33 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
34 #import "CDMSessionAVContentKeySession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "InbandTextTrackPrivateAVFObjC.h"
37 #import "Logging.h"
38 #import "MediaDescription.h"
39 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
40 #import "MediaSample.h"
41 #import "MediaSampleAVFObjC.h"
42 #import "MediaSourcePrivateAVFObjC.h"
43 #import "MediaTimeAVFoundation.h"
44 #import "NotImplemented.h"
45 #import "SourceBufferPrivateClient.h"
46 #import "TimeRanges.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "WebCoreDecompressionSession.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <QuartzCore/CALayer.h>
51 #import <map>
52 #import <objc/runtime.h>
53 #import <runtime/TypedArrayInlines.h>
54 #import <wtf/BlockObjCExceptions.h>
55 #import <wtf/HashCountedSet.h>
56 #import <wtf/MainThread.h>
57 #import <wtf/SoftLinking.h>
58 #import <wtf/WeakPtr.h>
59 #import <wtf/text/AtomicString.h>
60 #import <wtf/text/CString.h>
61
62 #pragma mark - Soft Linking
63
64 #import "CoreMediaSoftLink.h"
65
// AVFoundation is soft-linked so WebCore does not take a hard load-time
// dependency on the framework; each class and constant below is resolved
// lazily on first use.
66 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
67
68 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
69 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
70 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
71 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
72 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
73
74 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
75 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
76 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
77
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
83
// The #defines below let the rest of this file spell the framework constant
// names directly while actually invoking the soft-link getter functions.
84 #define AVMediaTypeVideo getAVMediaTypeVideo()
85 #define AVMediaTypeAudio getAVMediaTypeAudio()
86 #define AVMediaTypeText getAVMediaTypeText()
87 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
88 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
89
90 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
91 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
92 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
93
94 #pragma mark -
95 #pragma mark AVStreamSession
96
// Redeclaration of the AVStreamSession SPI methods this file calls; the real
// class is soft-linked from AVFoundation above, so only the selectors used
// here need to be visible to the compiler.
97 @interface AVStreamSession : NSObject
98 - (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
99 - (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
100 @end
101
102 #pragma mark -
103 #pragma mark WebAVStreamDataParserListener
104
// Receives AVStreamDataParser delegate callbacks — which arrive on the
// background parser queue — and forwards them to the owning
// SourceBufferPrivateAVFObjC on the main thread through a WeakPtr, so that
// callbacks racing with destruction are dropped safely.
105 @interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
106     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
107     AVStreamDataParser* _parser;
108 }
109 @property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
110 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
111 @end
112
113 @implementation WebAVStreamDataParserListener
114 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
115 {
116     self = [super init];
117     if (!self)
118         return nil;
119
120     ASSERT(parent);
121     _parent = parent;
122     _parser = parser;
    // Registers self as the parser's delegate; -invalidate or -dealloc must
    // clear it again so the parser never messages a deallocated listener.
123     [_parser setDelegate:self];
124     return self;
125 }
126
127 @synthesize parent=_parent;
128
129 - (void)dealloc
130 {
131     [_parser setDelegate:nil];
132     [super dealloc];
133 }
134
// Detaches from the parser so no further delegate callbacks are delivered.
135 - (void)invalidate
136 {
137     [_parser setDelegate:nil];
138     _parser = nullptr;
139 }
140
// The parser produced (or updated) an asset-level view of the appended data.
141 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
142 {
143     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
144
145     RetainPtr<AVAsset*> protectedAsset = asset;
146     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
147         if (parent)
148             parent->didParseStreamDataAsAsset(protectedAsset.get());
149     });
150 }
151
// Same as above; the discontinuity flag is intentionally ignored.
152 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
153 {
154     UNUSED_PARAM(discontinuity);
155     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
156
157     RetainPtr<AVAsset*> protectedAsset = asset;
158     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
159         if (parent)
160             parent->didParseStreamDataAsAsset(protectedAsset.get());
161     });
162 }
163
164 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
165 {
166     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
167
168     RetainPtr<NSError> protectedError = error;
169     callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
170         if (parent)
171             parent->didFailToParseStreamDataWithError(protectedError.get());
172     });
173 }
174
// One parsed media sample is ready for the given track.
175 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
176 {
177     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
178
179     RetainPtr<CMSampleBufferRef> protectedSample = sample;
180     callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
181         if (parent)
182             parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
183     });
184 }
185
186 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
187 {
188     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
189
190     callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
191         if (parent)
192             parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
193     });
194 }
195
196 - (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
197 {
198     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
199
200     // We must call synchronously to the main thread, as the AVStreamSession must be associated
201     // with the streamDataParser before the delegate method returns.
202     dispatch_sync(dispatch_get_main_queue(), [parent = _parent, trackID]() {
203         if (parent)
204             parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
205     });
206 }
207
// Blocks the background parser queue on the semaphore until the main thread
// has attached a CDM session (or the semaphore is signalled during
// abort/teardown to break the wait).
208 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
209 {
210     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
211
212     OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore = adoptOSObject(dispatch_semaphore_create(0));
213     callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
214         if (parent)
215             parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
216     });
217     dispatch_semaphore_wait(hasSessionSemaphore.get(), DISPATCH_TIME_FOREVER);
218 }
219 @end
220
// Observes "error" KVO changes (and HDCP output obscuring) on display layers
// and audio renderers, forwarding failures to the owning
// SourceBufferPrivateAVFObjC. _parent is a raw pointer: the owner must call
// -invalidate before it is destroyed.
221 @interface WebAVSampleBufferErrorListener : NSObject {
222     WebCore::SourceBufferPrivateAVFObjC* _parent;
223     Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
224     Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
225 }
226
227 - (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
228 - (void)invalidate;
229 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
230 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
231 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
232 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
233 @end
234
@implementation WebAVSampleBufferErrorListener

// Creates a listener forwarding layer/renderer errors to |parent|. The
// parent pointer is raw, so the owner must call -invalidate before the
// parent is destroyed; in-flight main-thread notifications then no-op.
- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Removes every KVO registration and notification observation, and severs
// the parent link. Idempotent.
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO callback; may arrive off the main thread, so the actual error delivery
// is bounced to the main thread. Each main-thread block re-checks _parent
// because -invalidate may have run between dispatch and execution (the
// original only performed this check in -layerFailedToDecode:, leaving these
// paths free to call through a cleared parent).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualTo:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                if (protectedSelf->_parent)
                    protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
            if ([[change valueForKey:NSKeyValueChangeNewKey] boolValue]) {
                // Synthesize an error so HDCP-obscured output is reported
                // through the same path as a decode error.
                RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
                callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                    if (protectedSelf->_parent)
                        protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
                });
            }
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualTo:@"error"]);

        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
            if (protectedSelf->_parent)
                protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

// Decode-failure notification from a display layer; forwarded like a KVO
// error, guarded against invalidation on the main thread.
- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
        if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
            return;
        protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
366
367 #pragma mark -
368
@interface WebBufferConsumedContext : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
}
@property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
@end

@implementation WebBufferConsumedContext
// Carries a weak back-reference to a SourceBufferPrivateAVFObjC through the
// CoreMedia "buffer consumed" notification payload dictionary.
- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    _parent = parent;
    return self;
}

@dynamic parent;
- (WebCore::SourceBufferPrivateAVFObjC*)parent
{
    // Null once the source buffer has been destroyed.
    return _parent.get();
}
@end
390
391 namespace WebCore {
392
393 #pragma mark -
394 #pragma mark MediaDescriptionAVFObjC
395
// Snapshot of an AVAssetTrack's kind (video/audio/text) and codec FourCC,
// exposed through the engine-neutral MediaDescription interface.
396 class MediaDescriptionAVFObjC final : public MediaDescription {
397 public:
398     static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
399     virtual ~MediaDescriptionAVFObjC() { }
400
401     AtomicString codec() const override { return m_codec; }
402     bool isVideo() const override { return m_isVideo; }
403     bool isAudio() const override { return m_isAudio; }
404     bool isText() const override { return m_isText; }
405     
406 protected:
407     MediaDescriptionAVFObjC(AVAssetTrack* track)
408         : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
409         , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
410         , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
411     {
        // The codec is taken from the first format description only.
412         NSArray* formatDescriptions = [track formatDescriptions];
413         CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
414         if (description) {
            // NOTE(review): the FourCharCode's raw bytes are reinterpreted in
            // host byte order, so on little-endian targets the 4-character
            // string comes out reversed — confirm downstream consumers expect
            // this representation.
415             FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
416             m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
417         }
418     }
419
420     AtomicString m_codec;
421     bool m_isVideo;
422     bool m_isAudio;
423     bool m_isText;
424 };
425
426 #pragma mark -
427 #pragma mark SourceBufferPrivateAVFObjC
428
// Key under which WebBufferConsumedContext travels in the notification's
// payload dictionary.
429 static NSString *kBufferConsumedContext = @"BufferConsumedContext";
430
// CoreMedia notification callback fired when an enqueued sample buffer has
// been consumed. May fire on any thread, so it re-dispatches itself to the
// main thread before touching the source buffer.
431 static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
432 {
433     if (!isMainThread()) {
434         callOnMainThread([notificationName, payload = retainPtr(payload)] {
435             bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
436         });
437         return;
438     }
439
440     if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
441         return;
442
443     ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
    // The payload carries a WebBufferConsumedContext whose WeakPtr resolves
    // to the owning SourceBufferPrivateAVFObjC (null if already destroyed).
444     auto context = (WebBufferConsumedContext *)[(NSDictionary *)payload valueForKey:kBufferConsumedContext];
445     if (!context)
446         return;
447
448     if (auto sourceBuffer = context.parent)
449         sourceBuffer->bufferWasConsumed();
450 }
451
// Factory for SourceBufferPrivateAVFObjC; the constructor is private, so all
// instances are created through here.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    auto sourceBuffer = adoptRef(new SourceBufferPrivateAVFObjC(parent));
    return sourceBuffer;
}
456
// Creates the stream data parser, the delegate that bridges its callbacks to
// the main thread, and the error listener; registers for CoreMedia
// buffer-consumed notifications (unregistered in the destructor).
457 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
458     : m_weakFactory(this)
459     , m_appendWeakFactory(this)
460     , m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
461     , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
462     , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
463     , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
464     , m_mediaSource(parent)
465 {
466     CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
467 }
468
// Tears down parser and renderers and unregisters the CoreMedia listener.
// The client must have been cleared by the owner before destruction.
469 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
470 {
471     ASSERT(!m_client);
472     destroyParser();
473     destroyRenderers();
474
475     CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
476
    // Unblock the parser queue if it is still waiting for a CDM session;
    // otherwise the pending background append would deadlock.
477     if (m_hasSessionSemaphore)
478         dispatch_semaphore_signal(m_hasSessionSemaphore.get());
479 }
480
// Called on the main thread when the parser has produced an AVAsset for the
// appended data. Builds the client's InitializationSegment (duration plus
// per-track info) and recreates the audio/video track private lists.
481 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
482 {
483     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
484
485     if (!m_mediaSource)
486         return;
487
    // Fail the whole append if any track requires hardware decode support
    // that this configuration does not provide.
488     if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
489         for (AVAssetTrack *track in [asset tracks]) {
490             if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
491                 m_parsingSucceeded = false;
492                 return;
493             }
494         }
495     }
496
497     m_asset = asset;
498
499     m_videoTracks.clear();
500     m_audioTracks.clear();
501
502     SourceBufferPrivateClient::InitializationSegment segment;
503
    // Prefer the overall duration hint when available; fall back to the
    // asset's duration when the hint is missing, invalid, or zero.
504     if ([m_asset respondsToSelector:@selector(overallDurationHint)])
505         segment.duration = toMediaTime([m_asset overallDurationHint]);
506
507     if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
508         segment.duration = toMediaTime([m_asset duration]);
509
510     for (AVAssetTrack* track in [m_asset tracks]) {
511         if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
512             // FIXME(125161): Handle in-band text tracks.
513             continue;
514         }
515
516         if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
517             SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
518             RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
519             info.track = videoTrack;
520             m_videoTracks.append(videoTrack);
521             info.description = MediaDescriptionAVFObjC::create(track);
522             segment.videoTracks.append(info);
523         } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
524             SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
525             RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
526             info.track = audioTrack;
527             m_audioTracks.append(audioTrack);
528             info.description = MediaDescriptionAVFObjC::create(track);
529             segment.audioTracks.append(info);
530         }
531
532         // FIXME(125161): Add TextTrack support
533     }
534
535     if (m_mediaSource)
536         m_mediaSource->player()->characteristicsChanged();
537
538     if (m_client)
539         m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
540 }
541
// Parser reported a failure for the current append; the error is only used
// for logging — the flag makes appendCompleted() report ParsingFailed.
542 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
543 {
544 #if LOG_DISABLED
545     UNUSED_PARAM(error);
546 #endif
547     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
548
549     m_parsingSucceeded = false;
550 }
551
// NOTE(review): this struct is not referenced anywhere in the visible part
// of the file, and mediaType is a reference member, so any instance must not
// outlive the String it refers to — verify remaining users (or remove it)
// before relying on it.
552 struct ProcessCodedFrameInfo {
553     SourceBufferPrivateAVFObjC* sourceBuffer;
554     int trackID;
555     const String& mediaType;
556 };
557
// Main-thread delivery of one parsed sample; the parser's output flags
// (final parameter) are intentionally unused.
558 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
559 {
560     processCodedFrame(trackID, sampleBuffer, mediaType);
561 }
562
// Wraps the sample in a MediaSample and hands it to the client. Returns
// false (dropping the sample) for tracks we do not render.
563 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
564 {
565     if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
566         // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
567         // will just confuse its state. Drop this sample until we can handle text tracks properly.
568         return false;
569     }
570
571     if (m_client) {
572         Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
573         LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
574         m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
575     }
576
577     return true;
578 }
579
// End-of-track delegate callback; intentionally unimplemented for now.
580 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
581 {
582     notImplemented();
583 }
584
// Invoked synchronously (via dispatch_sync from the parser queue) just
// before the parser requests key initialization data: the stream session or
// CDM session must be attached to the parser before the delegate returns.
585 void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
586 {
587     if (!m_mediaSource)
588         return;
589
590     ASSERT(m_parser);
591
592 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
593     LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
594     m_protectedTrackID = trackID;
595
    // Prefer an existing CDM session; otherwise fall back to attaching the
    // player's AVStreamSession (only when AVContentKeySession is unavailable).
596     if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
597         session->addParser(m_parser.get());
598     else if (!CDMSessionAVContentKeySession::isAvailable()) {
599         BEGIN_BLOCK_OBJC_EXCEPTIONS;
600         [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
601         END_BLOCK_OBJC_EXCEPTIONS;
602     }
603 #else
604     UNUSED_PARAM(trackID);
605 #endif
606 }
607
// Forwards encrypted-media init data to the media source ("key needed"). If
// a CDM session already exists the parser is attached and the waiting parser
// queue is released immediately; otherwise the semaphore is stashed so it
// can be signalled once a session arrives (or on abort/teardown).
608 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore)
609 {
610     if (!m_mediaSource)
611         return;
612
613 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
614     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
615     m_protectedTrackID = trackID;
616     RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
617     [initData getBytes:initDataArray->data() length:initDataArray->length()];
618     m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
619     if (auto session = m_mediaSource->player()->cdmSession()) {
620         session->addParser(m_parser.get());
621         dispatch_semaphore_signal(hasSessionSemaphore.get());
622     } else {
        // Release any previously stashed semaphore before replacing it, so
        // the parser queue waiting on it cannot be stranded.
623         if (m_hasSessionSemaphore)
624             dispatch_semaphore_signal(m_hasSessionSemaphore.get());
625         m_hasSessionSemaphore = hasSessionSemaphore;
626     }
627 #else
628     UNUSED_PARAM(initData);
629     UNUSED_PARAM(trackID);
630     UNUSED_PARAM(hasSessionSemaphore);
631 #endif
632 }
633
// Sets (or clears, with null) the client that receives initialization
// segments, samples, and append-complete notifications.
634 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
635 {
636     m_client = client;
637 }
638
// Returns the single process-wide concurrent queue on which all stream data
// parsing work is performed; created lazily and thread-safely on first use.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_once_t token;
    static dispatch_queue_t queue;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return queue;
}
648
// Appends media data asynchronously: the parse runs on the shared parser
// queue (tracked by m_isAppendingGroup so abort() can wait for it), and
// appendCompleted() is delivered back on the main thread via a WeakPtr.
649 void SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
650 {
651     LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
652
653     RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data length:length]);
654     WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr();
655     RetainPtr<AVStreamDataParser> parser = m_parser;
656     RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
657
658     m_parsingSucceeded = true;
659     dispatch_group_enter(m_isAppendingGroup.get());
660
    // A resetParserState() since the last append is surfaced to the parser
    // as a stream discontinuity on this first chunk.
661     dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
662         if (parserStateWasReset)
663             [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
664         else
665             [parser appendStreamData:nsData.get()];
666
667         callOnMainThread([weakThis] {
668             if (weakThis)
669                 weakThis->appendCompleted();
670         });
671         dispatch_group_leave(isAppendingGroup.get());
672     });
673     m_parserStateWasReset = false;
674 }
675
// Main-thread completion for append(): records loading progress on success
// and reports the overall result to the client.
676 void SourceBufferPrivateAVFObjC::appendCompleted()
677 {
678     if (m_parsingSucceeded && m_mediaSource)
679         m_mediaSource->player()->setLoadingProgresssed(true);
680
681     if (m_client)
682         m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
683 }
684
// Cancels outstanding append work: unblocks the parser queue, waits for the
// in-flight append to drain, then revokes the append WeakPtrs so any late
// main-thread callbacks become no-ops.
685 void SourceBufferPrivateAVFObjC::abort()
686 {
687     // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
688     // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
689     // semaphore, the m_isAppendingGroup wait operation will deadlock.
690     if (m_hasSessionSemaphore)
691         dispatch_semaphore_signal(m_hasSessionSemaphore.get());
692     dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
693     m_appendWeakFactory.revokeAll();
    // Give the delegate a fresh WeakPtr so future appends are delivered again.
694     m_delegate.get().parent = m_appendWeakFactory.createWeakPtr();
695 }
696
// Records that parser state was reset; the next append() will flag its data
// to the parser as a stream discontinuity.
697 void SourceBufferPrivateAVFObjC::resetParserState()
698 {
699     m_parserStateWasReset = true;
700 }
701
// Detaches the parser from any stream session, invalidates the delegate
// (stopping further callbacks), and drops both references.
702 void SourceBufferPrivateAVFObjC::destroyParser()
703 {
704 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
705     if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
706         [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
707 #endif
708
709     [m_delegate invalidate];
710     m_delegate = nullptr;
711     m_parser = nullptr;
712 }
713
// Tears down the display layer, decompression session, and every audio
// renderer (flushing, unregistering from the player, and stopping error
// observation for each).
714 void SourceBufferPrivateAVFObjC::destroyRenderers()
715 {
716     if (m_displayLayer)
717         setVideoLayer(nullptr);
718
719     if (m_decompressionSession)
720         setDecompressionSession(nullptr);
721
722     for (auto& renderer : m_audioRenderers.values()) {
723         if (m_mediaSource)
724             m_mediaSource->player()->removeAudioRenderer(renderer.get());
725         [renderer flush];
726         [renderer stopRequestingMediaData];
727         [m_errorListener stopObservingRenderer:renderer.get()];
728     }
729
730     m_audioRenderers.clear();
731 }
732
// Called when the media source detaches this buffer: tear down parsing and
// rendering state, then unregister from the source itself.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyParser();
    destroyRenderers();

    if (!m_mediaSource)
        return;

    m_mediaSource->removeSourceBuffer(this);
}
741
// Reports the player's ready state; HaveNothing once detached from the source.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;

    return m_mediaSource->player()->readyState();
}
746
// Propagates a ready-state change to the player; a no-op once detached.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setReadyState(readyState);
}
752
// True when a client is attached and it reports buffered video.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    if (!m_client)
        return false;

    return m_client->sourceBufferPrivateHasVideo();
}
757
// True when some video track is currently enabled (-1 is the "none" sentinel).
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    if (m_enabledVideoTrackID == -1)
        return false;

    return true;
}
762
// True when a client is attached and it reports buffered audio.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    if (!m_client)
        return false;

    return m_client->sourceBufferPrivateHasAudio();
}
767
// A video track was selected or deselected: start/stop requesting samples
// for it from the parser and the decompression session, then tell the media
// source that the selected-video state may have changed.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // m_mediaSource can be null after removedFromMediaSource(); every other
    // method here guards the dereference, so do the same (the original
    // dereferenced unconditionally).
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
790
// Responds to an audio track being enabled or disabled. Disabling stops parser
// delivery for the track and removes its renderer from the player; enabling
// lazily creates an AVSampleBufferAudioRenderer for the track, wires its
// ready-for-data callback, and hands it to the player.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        // The renderer stays in m_audioRenderers so re-enabling reuses it.
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        if (!m_audioRenderers.contains(trackID)) {
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            // NOTE(review): this block captures `this` implicitly; it appears to
            // rely on destroyRenderers() stopping media-data requests before this
            // object is destroyed — verify that ordering holds on all paths.
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
817
// Swaps the CDM session used by this buffer (legacy EME only). On attach, any
// waiter blocked on m_hasSessionSemaphore is released, and a previously cached
// HDCP error (see rendererDidReceiveError) is replayed to the new session.
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = session;

    if (m_session) {
        m_session->addSourceBuffer(this);
        // Release whoever is waiting for a session to become available.
        if (m_hasSessionSemaphore) {
            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
            m_hasSessionSemaphore = nullptr;
        }

        // Deliver a pending HDCP error on the main thread. The weak pointer is
        // re-checked inside the lambda in case this object, the session, or the
        // cached error goes away before the callback runs.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
851
// Flush every renderer owned by this buffer: the video path, then each audio
// renderer in turn.
void SourceBufferPrivateAVFObjC::flush()
{
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
859
// Adds an error client to be consulted by layerDidReceiveError() /
// rendererDidReceiveError(). A client may be registered at most once.
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
865
// Removes a previously registered error client; the client must currently be
// registered.
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(m_errorClients.contains(client));
    m_errorClients.remove(m_errorClients.find(client));
}
871
// Handles an error reported by the AVSampleBufferDisplayLayer. Error clients
// get first refusal: if any asks to ignore the error it is dropped; otherwise
// the OSStatus from the error's userInfo is forwarded to the client as a
// rendering error.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->layerDidReceiveError(layer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;

    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
891
// Handles an error reported by an AVSampleBufferAudioRenderer. 'HDCP' errors
// are cached in m_hdcpError so they can be replayed to a CDM session attached
// later (see setCDMSession()). The trailing early return mirrors
// layerDidReceiveError(); there is currently no handling after it.
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);

    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;
}
909
// Flush a single track: the video path when the ID names the enabled video
// track, otherwise that track's audio renderer (if one exists).
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);

    if (trackID == m_enabledVideoTrackID)
        flushVideo();
    else if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
920
// Flushes the video pipeline. Clears the cached natural size and the player's
// has-available-video-frame flag; when a decompression session is in use, a
// callback is re-armed so the flag flips back on once a new frame is available.
void SourceBufferPrivateAVFObjC::flushVideo()
{
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Force the next enqueued video sample to re-derive the natural size.
    m_cachedSize = std::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
940
// Flush one audio renderer and reset the player's available-sample flag for it.
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
{
    [renderer flush];

    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
948
// Hands a parsed media sample to the appropriate renderer. Video samples go to
// the decompression session and/or the display layer; audio samples go to the
// track's AVSampleBufferAudioRenderer. Samples for tracks that are neither the
// enabled video track nor a known audio track are dropped, as are samples that
// are not CMSampleBuffer-backed.
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        // Track natural-size changes from the sample's format description.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            // `this` was missing from the argument list, leaving the format
            // string's %p with no matching argument (undefined behavior).
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                // The first observed size applies immediately; later changes are
                // scheduled to take effect at the sample's presentation time.
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (m_displayLayer) {
            if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
                // Attach a consumed-notification context to a copy of the sample
                // so bufferWasConsumed() fires once the layer consumes this frame.
                auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
                CMSampleBufferRef rawSampleCopy = nullptr;
                if (CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy) == noErr) {
                    auto sampleCopy = adoptCF(rawSampleCopy);
                    CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, @{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
                    [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
                } else {
                    // Copy failed; enqueue the original sample (without the
                    // notification attachment) rather than a garbage pointer.
                    [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
                }
            } else
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        }
    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
997
// Invoked once the display layer consumes a sample tagged in enqueueSample()
// (via WebBufferConsumedContext); marks a video frame as available.
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1003
// Answers whether the renderer for the given track can accept more samples.
// For the enabled video track the decompression session is consulted first,
// falling back to the display layer. Unknown tracks report not ready.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    if (trackID == m_enabledVideoTrackID)
        return m_decompressionSession ? m_decompressionSession->isReadyForMoreMediaData() : [m_displayLayer isReadyForMoreMediaData];

    if (!m_audioRenderers.contains(trackID))
        return false;

    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1019
// Relay an active-state change to the owning media source, if any.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1025
// Delegate fast-seek time resolution to the client; without one, the requested
// time is returned unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1032
// Flush all renderers before a seek so previously queued samples are dropped.
void SourceBufferPrivateAVFObjC::willSeek()
{
    flush();
}
1037
// Forward the seek request to the client, if any.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;
    m_client->sourceBufferPrivateSeekToTime(time);
}
1043
// The size cached from the most recent video format description, or an empty
// size when none has been seen since the last flush (see flushVideo()).
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    if (!m_cachedSize)
        return FloatSize();
    return m_cachedSize.value();
}
1048
// Called when a renderer (or the decompression session) can accept more data
// for the given track. Stops the corresponding request cycle so the callback
// does not re-fire, then tells the client which track is ready. Unknown track
// IDs are ignored.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    // Log on the MediaSource channel, matching every other LOG() in this
    // class; this one alone used the generic Media channel.
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else
        return;

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1064
// Arms one-shot ready-for-data callbacks for the given track: on the
// decompression session and/or display layer for the enabled video track, or
// on the track's audio renderer. Each callback funnels into
// didBecomeReadyForMoreSamples(), which stops the request cycle again.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        // NOTE(review): these blocks capture `this` implicitly; they are assumed
        // to be cancelled (stopRequestingMediaData) before destruction — verify.
        if (m_displayLayer) {
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1085
// Swaps the AVSampleBufferDisplayLayer used for video output. The outgoing
// layer is flushed, its data requests stopped, and its error observation ended;
// the incoming layer gets a ready-for-data callback and error observation, and
// the client is asked to re-enqueue samples for the enabled video track so the
// new layer starts from a consistent stream.
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1110
// Swaps the WebCoreDecompressionSession used for video decode. The outgoing
// session is stopped and invalidated; the incoming session gets ready-for-data
// and frame-available callbacks (both guarded by weak pointers, since they can
// fire after this object is gone), and the client is asked to re-enqueue
// samples for the enabled video track.
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1138
1139 }
1140
1141 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)