Rename ENABLE_ENCRYPTED_MEDIA_V2 to ENABLE_LEGACY_ENCRYPTED_MEDIA
[WebKit.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVFoundationSPI.h"
32 #import "CDMSessionAVContentKeySession.h"
33 #import "CDMSessionMediaSourceAVFObjC.h"
34 #import "ExceptionCodePlaceholder.h"
35 #import "Logging.h"
36 #import "MediaDescription.h"
37 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
38 #import "MediaSample.h"
39 #import "MediaSampleAVFObjC.h"
40 #import "MediaSourcePrivateAVFObjC.h"
41 #import "MediaTimeAVFoundation.h"
42 #import "NotImplemented.h"
43 #import "SoftLinking.h"
44 #import "SourceBufferPrivateClient.h"
45 #import "TimeRanges.h"
46 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "InbandTextTrackPrivateAVFObjC.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <QuartzCore/CALayer.h>
51 #import <objc/runtime.h>
52 #import <wtf/text/AtomicString.h>
53 #import <wtf/text/CString.h>
54 #import <wtf/BlockObjCExceptions.h>
55 #import <wtf/HashCountedSet.h>
56 #import <wtf/MainThread.h>
57 #import <wtf/WeakPtr.h>
58 #import <map>
59
60 #pragma mark - Soft Linking
61
62 #import "CoreMediaSoftLink.h"
63
64 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
65
66 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
67 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
68 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
69 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
70 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
71
72 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
73 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
74 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
75
76 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
77 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
81
82 #define AVMediaTypeVideo getAVMediaTypeVideo()
83 #define AVMediaTypeAudio getAVMediaTypeAudio()
84 #define AVMediaTypeText getAVMediaTypeText()
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
86 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
87
88 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
89 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
90 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
91
92 #pragma mark -
93 #pragma mark AVStreamSession
94
// Minimal declaration of the private AVStreamSession API used in this file.
// The class itself is soft-linked from AVFoundation (see SOFT_LINK_CLASS
// above); no SDK header declares these selectors, so they are declared here.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
99
100 #pragma mark -
101 #pragma mark AVSampleBufferDisplayLayer
102
// Minimal declaration of the private AVSampleBufferDisplayLayer API used in
// this file (the class is soft-linked above). The layer consumes enqueued
// CMSampleBuffers and pulls more data via the requestMediaData… block.
@interface AVSampleBufferDisplayLayer : CALayer
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (void)flushAndRemoveImage;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
113
114 #pragma mark -
115 #pragma mark AVSampleBufferAudioRenderer
116
// Minimal declaration of the private AVSampleBufferAudioRenderer API used in
// this file (the class is soft-linked above); mirrors the display-layer API
// but without the image-flush variant.
@interface AVSampleBufferAudioRenderer : NSObject
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
126
127 #pragma mark -
128 #pragma mark WebAVStreamDataParserListener
129
// Delegate attached to the soft-linked AVStreamDataParser. Parser callbacks
// arrive on a background queue; this listener forwards them to the owning
// SourceBufferPrivateAVFObjC on the main thread through a WeakPtr.
@interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    AVStreamDataParser* _parser;
}
@property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
// Declared so callers holding a typed pointer (e.g. destroyParser()'s
// [m_delegate invalidate]) do not message an undeclared selector; the
// implementation appears below.
- (void)invalidate;
@end
137
@implementation WebAVStreamDataParserListener
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    // Register as the parser's delegate; undone in -dealloc / -invalidate.
    [_parser setDelegate:self];
    return self;
}

@synthesize parent=_parent;

- (void)dealloc
{
    [_parser setDelegate:nil];
    [super dealloc];
}

- (void)invalidate
{
    // Detach from the parser eagerly so no further delegate callbacks are
    // delivered after the owner tears us down.
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// All delegate methods below are invoked on the parser's background queue and
// bounce their payload to the main thread, where the WeakPtr _parent can be
// checked safely.

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    // RetainPtr<AVAsset> (not RetainPtr<AVAsset*>), matching RetainPtr usage
    // elsewhere in this file for Objective-C types.
    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    UNUSED_PARAM(discontinuity);
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    RetainPtr<NSError> protectedError = error;
    callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
        if (parent)
            parent->didFailToParseStreamDataWithError(protectedError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    RetainPtr<CMSampleBufferRef> protectedSample = sample;
    // Convert to a WTF::String up front so the lambda does not touch the
    // NSString off the main thread.
    String mediaType = nsMediaType;
    callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType, flags] {
        if (parent)
            parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    String mediaType = nsMediaType;
    callOnMainThread([parent = _parent, trackID, mediaType] {
        if (parent)
            parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    dispatch_sync(dispatch_get_main_queue(), [parent = _parent, trackID]() {
        if (parent)
            parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    // Block the parser queue until the main thread has a CDM session to attach
    // the parser to; the parent signals the semaphore (possibly later, from
    // setCDMSession()) to let parsing continue.
    OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore = adoptOSObject(dispatch_semaphore_create(0));
    callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
        if (parent)
            parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
    });
    dispatch_semaphore_wait(hasSessionSemaphore.get(), DISPATCH_TIME_FOREVER);
}
@end
268
// Observes "error" (and HDCP obscuring) state on display layers and audio
// renderers via KVO/notifications and reports failures to the owning
// SourceBufferPrivateAVFObjC on the main thread. _parent is a raw pointer;
// the owner must call -invalidate before it is destroyed.
@interface WebAVSampleBufferErrorListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
}

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
@end
282
@implementation WebAVSampleBufferErrorListener

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

- (void)invalidate
{
    // Already invalidated (or never used): nothing to unregister.
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualTo:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                // -invalidate or -stopObservingLayer: may have run between the
                // KVO callback and this main-thread task; re-check, as
                // -layerFailedToDecode: does.
                if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
                    return;
                protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
            if ([[change valueForKey:NSKeyValueChangeNewKey] boolValue]) {
                // Synthesize an 'HDCP' error when output becomes obscured due
                // to insufficient external protection.
                RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
                callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                    if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
                        return;
                    protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
                });
            }
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualTo:@"error"]);

        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
            // Same invalidation race as above: verify we still track this
            // renderer before touching _parent.
            if (!protectedSelf->_parent || !protectedSelf->_renderers.contains(renderer.get()))
                return;
            protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
        if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
            return;
        protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
414
415 namespace WebCore {
416
417 #pragma mark -
418 #pragma mark MediaDescriptionAVFObjC
419
// MediaDescription backed by an AVAssetTrack: the kind flags come from the
// track's media characteristics, and the codec name from the four-character
// media subtype of the track's first format description.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    AtomicString codec() const override { return m_codec; }
    bool isVideo() const override { return m_isVideo; }
    bool isAudio() const override { return m_isAudio; }
    bool isText() const override { return m_isText; }
    
protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        NSArray* formatDescriptions = [track formatDescriptions];
        // Only the first format description is consulted; tracks without one
        // leave m_codec null.
        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // NOTE(review): this reinterprets the FourCharCode's in-memory
            // bytes directly, so the character order of the resulting string
            // depends on host endianness — confirm consumers expect this.
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;  // four-character codec code; null if unavailable
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
449
450 #pragma mark -
451 #pragma mark SourceBufferPrivateAVFObjC
452
// Factory: instances are always heap-allocated and reference-counted.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    return adoptRef(new SourceBufferPrivateAVFObjC(parent));
}
457
// Note: the initializer for m_delegate calls createWeakPtr(), so the weak
// factories must be initialized before it; keep this ordering intact.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_weakFactory(this)
    , m_appendWeakFactory(this)
    , m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
    , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
    , m_mediaSource(parent)
    , m_client(0)
    , m_parsingSucceeded(true)
    , m_enabledVideoTrackID(-1) // -1 == no video track currently enabled
    , m_protectedTrackID(-1) // -1 == no protected (encrypted) track seen yet
{
}
472
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    // The client must have been detached (setClient(nullptr)) before teardown.
    ASSERT(!m_client);
    destroyParser();
    destroyRenderers();

    // If the parser queue is still blocked waiting for a CDM session, unblock
    // it so it does not wait forever on a destroyed object.
    if (m_hasSessionSemaphore)
        dispatch_semaphore_signal(m_hasSessionSemaphore.get());
}
482
// Main-thread continuation of the parser's "did parse asset" callback: builds
// an InitializationSegment (duration plus video/audio track lists) from the
// AVAsset and delivers it to the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    m_asset = asset;

    // A new initialization segment replaces any previously announced tracks.
    m_videoTracks.clear();
    m_audioTracks.clear();

    SourceBufferPrivateClient::InitializationSegment segment;
    segment.duration = toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
            // FIXME(125161): Handle in-band text tracks.
            continue;
        }

        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
}
526
// Main-thread continuation of a parser failure: records the failure so
// appendCompleted() reports ParsingFailed for the current append.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
536
// Aggregates the arguments of a coded-frame callback.
// NOTE(review): no use of this struct is visible in this part of the file,
// and holding a String by reference is only safe while the referent outlives
// the struct — verify any remaining users before relying on it.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
542
// Main-thread continuation of the parser's sample callback; the parser's
// output flags are currently ignored.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
{
    UNUSED_PARAM(flags);

    processCodedFrame(trackID, sampleBuffer, mediaType);
}
549
// Wraps a parsed CMSampleBuffer in a MediaSample and delivers it to the
// client. Returns false when the sample belongs to a track we do not render
// (neither the enabled video track nor a known audio renderer).
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
        // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
        // will just confuse its state. Drop this sample until we can handle text tracks properly.
        return false;
    }

    if (m_client) {
        Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
        m_client->sourceBufferPrivateDidReceiveSample(this, mediaSample);
    }

    return true;
}
566
// Main-thread continuation of the parser's end-of-track callback.
// Intentionally unimplemented for now.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
{
    UNUSED_PARAM(mediaType);
    UNUSED_PARAM(trackID);
    notImplemented();
}
573
// Called synchronously (via dispatch_sync from the parser queue) just before
// the parser emits content-key init data: the AVStreamSession must be
// associated with the parser before the delegate callback returns.
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;

    // Prefer attaching the parser to an existing CDM session; otherwise fall
    // back to the player's stream session (unless AVContentKeySession-based
    // sessions are available, in which case attachment happens later).
    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
        session->addParser(m_parser.get());
    else if (!CDMSessionAVContentKeySession::isAvailable()) {
        BEGIN_BLOCK_OBJC_EXCEPTIONS;
        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
        END_BLOCK_OBJC_EXCEPTIONS;
    }
#else
    UNUSED_PARAM(trackID);
#endif
}
596
597 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore)
598 {
599     if (!m_mediaSource)
600         return;
601
602     UNUSED_PARAM(trackID);
603 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
604     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
605     m_protectedTrackID = trackID;
606     RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
607     [initData getBytes:initDataArray->data() length:initDataArray->length()];
608     m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
609     if (auto session = m_mediaSource->player()->cdmSession()) {
610         session->addParser(m_parser.get());
611         dispatch_semaphore_signal(hasSessionSemaphore.get());
612     } else {
613         if (m_hasSessionSemaphore)
614             dispatch_semaphore_signal(m_hasSessionSemaphore.get());
615         m_hasSessionSemaphore = hasSessionSemaphore;
616     }
617 #else
618     UNUSED_PARAM(initData);
619 #endif
620 }
621
// Sets (or clears, with null) the client that receives initialization
// segments, samples, and append-completion notifications.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
626
// Returns the process-wide concurrent dispatch queue on which
// AVStreamDataParser append operations run; created lazily, exactly once.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_queue_t parserQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
636
637 void SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
638 {
639     LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
640
641     RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data length:length]);
642     WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr();
643     RetainPtr<AVStreamDataParser> parser = m_parser;
644     RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
645
646     m_parsingSucceeded = true;
647     dispatch_group_enter(m_isAppendingGroup.get());
648
649     dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
650         if (parserStateWasReset)
651             [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
652         else
653             [parser appendStreamData:nsData.get()];
654
655         callOnMainThread([weakThis] {
656             if (weakThis)
657                 weakThis->appendCompleted();
658         });
659         dispatch_group_leave(isAppendingGroup.get());
660     });
661     m_parserStateWasReset = false;
662 }
663
// Runs on the main thread after a chunk has been parsed; reports progress to
// the player and the append outcome to the client.
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    if (m_parsingSucceeded && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (m_client)
        m_client->sourceBufferPrivateAppendComplete(this, m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
}
672
// Cancels all outstanding append operations and blocks until the parser queue
// has drained.
void SourceBufferPrivateAVFObjC::abort()
{
    // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
    // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
    // semaphore, the m_isAppendingGroup wait operation will deadlock.
    if (m_hasSessionSemaphore)
        dispatch_semaphore_signal(m_hasSessionSemaphore.get());
    dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    // Invalidate the WeakPtrs held by pending appendCompleted() tasks so
    // aborted appends cannot complete, then re-arm the delegate with a fresh
    // WeakPtr for future appends.
    m_appendWeakFactory.revokeAll();
    m_delegate.get().parent = m_appendWeakFactory.createWeakPtr();
}
684
// Defers the actual reset: the next append() marks its data as a
// discontinuity instead of tearing the parser down here.
void SourceBufferPrivateAVFObjC::resetParserState()
{
    m_parserStateWasReset = true;
}
689
// Detaches the parser from the stream session (if any), invalidates the
// delegate so no further callbacks arrive, and drops both objects.
void SourceBufferPrivateAVFObjC::destroyParser()
{
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
699
// Tears down the display layer and all audio renderers: detach from the
// player, flush pending samples, stop the ready-for-data callbacks, and stop
// error observation — in that order — before releasing each object.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer) {
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
        m_displayLayer = nullptr;
    }

    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    m_audioRenderers.clear();
}
721
// Full detach: tear down parsing and rendering, then unregister from the
// owning media source.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyParser();
    destroyRenderers();

    if (m_mediaSource)
        m_mediaSource->removeSourceBuffer(this);
}
730
731 MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
732 {
733     return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
734 }
735
736 void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
737 {
738     if (m_mediaSource)
739         m_mediaSource->player()->setReadyState(readyState);
740 }
741
742 bool SourceBufferPrivateAVFObjC::hasVideo() const
743 {
744     if (!m_client)
745         return false;
746
747     return m_client->sourceBufferPrivateHasVideo(this);
748 }
749
750 bool SourceBufferPrivateAVFObjC::hasAudio() const
751 {
752     if (!m_client)
753         return false;
754
755     return m_client->sourceBufferPrivateHasAudio(this);
756 }
757
// Called when a video track's selected state changes. Selecting lazily
// creates the AVSampleBufferDisplayLayer and routes the track's samples to
// it; deselecting stops sample delivery and detaches the layer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        if (!m_displayLayer) {
            m_displayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
            [m_displayLayer setName:@"SourceBufferPrivateAVFObjC AVSampleBufferDisplayLayer"];
#endif
            // NOTE(review): this block captures |this| implicitly (via the
            // didBecomeReadyForMoreSamples call). destroyRenderers() calls
            // -stopRequestingMediaData before dropping the layer — confirm
            // that guarantees the block cannot fire after destruction.
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            [m_errorListener beginObservingLayer:m_displayLayer.get()];
        }
        if (m_mediaSource)
            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
    }
}
783
// Called when an audio track's enabled state changes. Enabling lazily creates
// (and caches, per track ID) an AVSampleBufferAudioRenderer and attaches it
// to the player; disabling detaches the renderer but keeps it cached.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        if (!m_audioRenderers.contains(trackID)) {
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            // NOTE(review): this block captures |this| implicitly (via the
            // didBecomeReadyForMoreSamples call); destroyRenderers() calls
            // -stopRequestingMediaData before releasing the renderer —
            // confirm that prevents the block running after destruction.
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
810
// Associates this source buffer with a CDM session, detaching from any
// previous one first. Passing the current session is a no-op; passing null
// simply detaches.
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
    if (session == m_session)
        return;

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = session;

    if (m_session) {
        m_session->addSourceBuffer(this);
        // Wake anything blocked on the session semaphore now that a session
        // exists; the semaphore is one-shot, so clear it after signaling.
        if (m_hasSessionSemaphore) {
            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
            m_hasSessionSemaphore = nullptr;
        }

        // An HDCP error may have arrived before any session existed (see
        // rendererDidReceiveError()); replay it to the new session. This runs
        // asynchronously and is guarded by a WeakPtr in case this object is
        // destroyed, or the session/error state changes, before the hop.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
}
840
// Discards all enqueued-but-unrendered media: clears the display layer
// (including its current image) and flushes every attached audio renderer.
void SourceBufferPrivateAVFObjC::flush()
{
    if (m_displayLayer)
        [m_displayLayer flushAndRemoveImage];

    for (auto& audioRenderer : m_audioRenderers.values())
        [audioRenderer flush];
}
849
// Registers a client to be consulted when a layer or renderer reports an
// error. The pointer is stored unretained; double-registration is a
// programmer error.
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
855
// Removes a previously registered error client. Unregistering a client that
// was never registered is a programmer error.
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    size_t position = m_errorClients.find(client);
    ASSERT(position != notFound);
    m_errorClients.remove(position);
}
861
// Error callback from the AVSampleBufferDisplayLayer. Registered error clients
// get first refusal; if none of them claims the error, its OSStatus code is
// forwarded to the source-buffer client as a rendering error.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool errorWasIgnored = false;
    for (auto& errorClient : m_errorClients) {
        bool clientIgnoredError = false;
        errorClient->layerDidReceiveError(layer, error, clientIgnoredError);
        errorWasIgnored |= clientIgnoredError;
    }
    if (errorWasIgnored)
        return;

    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(this, errorCode);
}
881
// Error callback from an AVSampleBufferAudioRenderer. HDCP errors are stashed
// so setCDMSession() can replay them once a CDM session is attached.
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);

    // 'HDCP' is the four-character code used for HDCP failures; remember the
    // error for later replay to the session (see setCDMSession()).
    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Give registered error clients a chance to claim (ignore) the error.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    // NOTE(review): this early return is currently a no-op (nothing follows);
    // it mirrors layerDidReceiveError(), which does report the error afterwards.
    if (anyIgnored)
        return;
}
899
// Returns a copy of |sampleBuffer| with kCMSampleAttachmentKey_DoNotDisplay
// set on every sample, so the copy can be enqueued and decoded without its
// frames being shown. If the copy cannot be created, falls back to returning
// the ORIGINAL (displaying) buffer.
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
    if (!newSampleBuffer)
        // Constructing the RetainPtr from a raw pointer retains it, so the
        // caller receives its own reference to the original buffer.
        return sampleBuffer;

    // Passing true makes Core Media create the attachments array (one mutable
    // dictionary per sample) if the buffer does not have one yet.
    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    // adoptCF takes over the +1 reference returned by CMSampleBufferCreateCopy.
    return adoptCF(newSampleBuffer);
}
915
// Dispatches a batch of non-displaying samples to the renderer that owns the
// given track: the display layer for the enabled video track, or the matching
// audio renderer. Unknown track IDs are silently dropped.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    // Vector::size() is unsigned; cast it so the argument matches the %d
    // conversion in the printf-style format string.
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, static_cast<int>(mediaSamples.size()), trackID);

    if (trackID == m_enabledVideoTrackID)
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
    else if (m_audioRenderers.contains(trackID))
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers.get(trackID).get());
}
926
// Flushes the audio renderer, then enqueues a DoNotDisplay-marked copy of each
// sample so it is decoded without being rendered.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
{
    [renderer flush];

    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        // Enqueue a copy with kCMSampleAttachmentKey_DoNotDisplay set.
        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [renderer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }
}
942
// Flushes the display layer, then enqueues a DoNotDisplay-marked copy of each
// sample so it is decoded without being shown, and resets the player's
// available-frame and pending-size-change state accordingly.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
{
    [layer flush];

    for (auto& mediaSample : mediaSamples) {
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());

        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        // Enqueue a copy with kCMSampleAttachmentKey_DoNotDisplay set.
        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [layer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }

    if (m_mediaSource) {
        // Nothing displayable was enqueued, so the player has no frame until a
        // displaying sample arrives (see enqueueSample()).
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
965
// Enqueues one parsed media sample to the renderer owning its track: the
// display layer for the enabled video track (tracking presentation-size
// changes along the way), or the matching audio renderer. Samples for unknown
// tracks or with non-CMSampleBuffer payloads are dropped.
void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    RefPtr<MediaSample> mediaSample = prpMediaSample;

    PlatformSample platformSample = mediaSample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes so the player can be resized at the
        // sample's presentation time.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (formatSize != m_cachedSize) {
            // Pass |this| for the leading %p specifier; it was previously
            // missing, which misaligned all the varargs in this LOG call.
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            m_cachedSize = formatSize;
            if (m_mediaSource)
                m_mediaSource->player()->sizeWillChangeAtTime(mediaSample->presentationTime(), formatSize);
        }

        [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource)
            m_mediaSource->player()->setHasAvailableVideoFrame(true);
    } else
        [m_audioRenderers.get(trackID) enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
}
996
// Returns whether the renderer owning the given track can accept more media
// data. Asking about an unknown track is a programmer error and answers false.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();

    if (trackID == m_enabledVideoTrackID)
        return [m_displayLayer isReadyForMoreMediaData];

    if (m_audioRenderers.contains(trackID))
        return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];

    ASSERT_NOT_REACHED();
    return false;
}
1009
// Reports this buffer's active state to the media source, if still attached.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1015
// Asks the client for the best seek target near |time| within the given
// thresholds; without a client the requested time is returned unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    if (!m_client)
        return time;

    return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
}
1022
// Forwards a seek request to the client, if one is attached.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;

    m_client->sourceBufferPrivateSeekToTime(this, time);
}
1028
// Returns the most recent video presentation size observed while enqueueing
// samples (see the size-change tracking in enqueueSample()).
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    return m_cachedSize;
}
1033
// Invoked from the requestMediaDataWhenReadyOnQueue: blocks when the display
// layer or an audio renderer can accept more data. Disarms further callbacks
// for that track (notifyClientWhenReadyForMoreSamples() re-arms them) and
// notifies the client once.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer stopRequestingMediaData];
    else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else {
        ASSERT_NOT_REACHED();
        return;
    }

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
}
1048
// Arms a "ready for more data" callback on the renderer owning the given
// track; didBecomeReadyForMoreSamples() disarms it and notifies the client.
// Asking about an unknown track is a programmer error.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        // The block implicitly captures |this| (unretained) through the
        // member-function call, and runs on the main queue.
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else if (m_audioRenderers.contains(trackID)) {
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else
        ASSERT_NOT_REACHED();
}
1063
1064 }
1065
1066 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)