[MSE] Adopt new AVSampleBufferDisplayLayer SPI
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMFairPlayStreaming.h"
34 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
35 #import "CDMSessionAVContentKeySession.h"
36 #import "CDMSessionMediaSourceAVFObjC.h"
37 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "Logging.h"
39 #import "MediaDescription.h"
40 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
41 #import "MediaSample.h"
42 #import "MediaSampleAVFObjC.h"
43 #import "MediaSourcePrivateAVFObjC.h"
44 #import "NotImplemented.h"
45 #import "SharedBuffer.h"
46 #import "SourceBufferPrivateClient.h"
47 #import "TimeRanges.h"
48 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
49 #import "WebCoreDecompressionSession.h"
50 #import <AVFoundation/AVAssetTrack.h>
51 #import <JavaScriptCore/TypedArrayInlines.h>
52 #import <QuartzCore/CALayer.h>
53 #import <objc/runtime.h>
54 #import <pal/avfoundation/MediaTimeAVFoundation.h>
55 #import <pal/spi/mac/AVFoundationSPI.h>
56 #import <wtf/BlockObjCExceptions.h>
57 #import <wtf/HashCountedSet.h>
58 #import <wtf/MainThread.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WTFSemaphore.h>
61 #import <wtf/WeakPtr.h>
62 #import <wtf/text/AtomicString.h>
63 #import <wtf/text/CString.h>
64
65 #pragma mark - Soft Linking
66
67 #import <pal/cf/CoreMediaSoftLink.h>
68
69 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
70
71 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
72 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
73 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
74 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
75 ALLOW_NEW_API_WITHOUT_GUARDS_END
76 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
77 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
83 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
84
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
86 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
87
88 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
89 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
90 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
91
// SPI category: declares -prerollDecodeWithCompletionHandler:, a private
// AVSampleBufferDisplayLayer method. Its availability is probed at runtime
// (via instancesRespondToSelector: in the constructor/destructor below)
// before falling back to the legacy CMNotificationCenter
// buffer-consumed notification path.
92 @interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerQueueManagementPrivate)
93 - (void)prerollDecodeWithCompletionHandler:(void (^)(BOOL success))block;
94 @end
95
96 #pragma mark -
97 #pragma mark AVStreamSession
98
// Minimal SPI declaration of AVStreamSession (soft-linked above). Only the
// parser attach/detach methods used by the LEGACY_ENCRYPTED_MEDIA code
// paths are declared here.
99 @interface AVStreamSession : NSObject
100 - (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
101 - (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
102 @end
103
104 #pragma mark -
105 #pragma mark WebAVStreamDataParserListener
106
// Delegate object for AVStreamDataParser. Holds the owning
// SourceBufferPrivateAVFObjC weakly so callbacks arriving after the owner
// is destroyed are dropped; abortSemaphore lets the owner unblock the two
// synchronous content-key delegate callbacks (see implementation).
107 @interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
108     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
109     Box<Semaphore> _abortSemaphore;
110     AVStreamDataParser* _parser;
111 }
112 @property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
113 @property (assign) Box<Semaphore> abortSemaphore;
114 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
115 @end
116
// Parser delegate: callbacks arrive on the (concurrent) data parser queue.
// Every asynchronous method bounces its payload to the main thread and
// forwards to the owner through the WeakPtr; the two content-key methods
// must block the parser queue until the main thread has responded.
117 @implementation WebAVStreamDataParserListener
118 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
119 {
120     self = [super init];
121     if (!self)
122         return nil;
123
124     ASSERT(parent);
125     _parent = parent;
        // _parser is not retained here; the owner keeps it alive and calls
        // -invalidate before releasing it.
126     _parser = parser;
127     [_parser setDelegate:self];
128     return self;
129 }
130
131 @synthesize parent=_parent;
132 @synthesize abortSemaphore=_abortSemaphore;
133
134 - (void)dealloc
135 {
136     [_parser setDelegate:nil];
137     [super dealloc];
138 }
139
    // Detaches from the parser so no further delegate callbacks are delivered.
140 - (void)invalidate
141 {
142     [_parser setDelegate:nil];
143     _parser = nullptr;
144 }
145
146 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
147 {
148     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
149
150     RetainPtr<AVAsset> protectedAsset = asset;
151     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
152         if (parent)
153             parent->didParseStreamDataAsAsset(protectedAsset.get());
154     });
155 }
156
    // Same forwarding as above; the discontinuity flag is deliberately ignored.
157 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
158 {
159     UNUSED_PARAM(discontinuity);
160     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
161
162     RetainPtr<AVAsset> protectedAsset = asset;
163     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
164         if (parent)
165             parent->didParseStreamDataAsAsset(protectedAsset.get());
166     });
167 }
168
169 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
170 {
171     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
172
173     RetainPtr<NSError> protectedError = error;
174     callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
175         if (parent)
176             parent->didFailToParseStreamDataWithError(protectedError.get());
177     });
178 }
179
180 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
181 {
182     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
183
        // The NSString is converted to a WTF::String before crossing threads.
184     RetainPtr<CMSampleBufferRef> protectedSample = sample;
185     callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
186         if (parent)
187             parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
188     });
189 }
190
191 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
192 {
193     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
194
195     callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
196         if (parent)
197             parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
198     });
199 }
200
    // Blocking callback: polls two semaphores at 100ms so that either the
    // main thread's response or an owner-initiated abort can wake the
    // parser queue — an abort therefore cannot deadlock it.
201 IGNORE_WARNINGS_BEGIN("deprecated-implementations")
202 - (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
203 IGNORE_WARNINGS_END
204 {
205     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
206
207     // We must call synchronously to the main thread, as the AVStreamSession must be associated
208     // with the streamDataParser before the delegate method returns.
209     Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
210     callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
211         if (parent)
212             parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
213         respondedSemaphore->signal();
214     });
215
216     while (true) {
217         if (respondedSemaphore->waitFor(100_ms))
218             return;
219
220         if (_abortSemaphore->waitFor(100_ms)) {
            // Re-signal so any other callback blocked on the abort
            // semaphore also wakes up.
221             _abortSemaphore->signal();
222             return;
223         }
224     }
225 }
226
    // Blocking callback: waits until the main thread has a CDM session for
    // this init data (or the owner aborts), using the same two-semaphore
    // polling pattern as above.
227 IGNORE_WARNINGS_BEGIN("deprecated-implementations")
228 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
229 IGNORE_WARNINGS_END
230 {
231     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
232
233     Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
234     callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
235         if (parent)
236             parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
237     });
238
239     while (true) {
240         if (hasSessionSemaphore->waitFor(100_ms))
241             return;
242
243         if (_abortSemaphore->waitFor(100_ms)) {
244             _abortSemaphore->signal();
245             return;
246         }
247     }
248 }
249 @end
250
// Observes error state on AVSampleBufferDisplayLayers (KVO on "error" and
// "outputObscuredDueToInsufficientExternalProtection", plus the
// failed-to-decode notification) and on AVSampleBufferAudioRenderers
// (KVO on "error"), forwarding to the owner on the main thread.
251 @interface WebAVSampleBufferErrorListener : NSObject {
252     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
253     Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
254     ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
255     Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
256     ALLOW_NEW_API_WITHOUT_GUARDS_END
257 }
258
259 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent;
260 - (void)invalidate;
261 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
262 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
263 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
264 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
265 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
266 ALLOW_NEW_API_WITHOUT_GUARDS_END
267 @end
268
// KVO/notification listener implementation. Observation callbacks may
// arrive off the main thread; every notification is forwarded to the
// weakly-held owner via callOnMainThread.
269 @implementation WebAVSampleBufferErrorListener
270
271 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent
272 {
273     if (!(self = [super init]))
274         return nil;
275
276     _parent = WTFMove(parent);
277     return self;
278 }
279
280 - (void)dealloc
281 {
282     [self invalidate];
283     [super dealloc];
284 }
285
    // Removes every KVO registration and notification subscription; the
    // early return makes repeated invalidation a no-op.
286 - (void)invalidate
287 {
288     if (!_parent && !_layers.size() && !_renderers.size())
289         return;
290
291     for (auto& layer : _layers) {
292         [layer removeObserver:self forKeyPath:@"error"];
293         [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
294     }
295     _layers.clear();
296
297     for (auto& renderer : _renderers)
298         [renderer removeObserver:self forKeyPath:@"error"];
299     _renderers.clear();
300
301     [[NSNotificationCenter defaultCenter] removeObserver:self];
302
303     _parent = nullptr;
304 }
305
    // Registers the two layer key paths plus the failed-to-decode
    // notification; must be balanced by -stopObservingLayer:.
306 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
307 {
308     ASSERT(_parent);
309     ASSERT(!_layers.contains(layer));
310
311     _layers.append(layer);
312     [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
313     [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
314     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
315 }
316
317 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
318 {
319     ASSERT(_parent);
320     ASSERT(_layers.contains(layer));
321
322     [layer removeObserver:self forKeyPath:@"error"];
323     [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
324     _layers.remove(_layers.find(layer));
325
326     [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
327 }
328
329 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
330 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
331 {
332 ALLOW_NEW_API_WITHOUT_GUARDS_END
333     ASSERT(_parent);
334     ASSERT(!_renderers.contains(renderer));
335
336     _renderers.append(renderer);
337     [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
338 }
339
340 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
341 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
342 ALLOW_NEW_API_WITHOUT_GUARDS_END
343 {
344     ASSERT(_parent);
345     ASSERT(_renderers.contains(renderer));
346
347     [renderer removeObserver:self forKeyPath:@"error"];
348     _renderers.remove(_renderers.find(renderer));
349 }
350
    // Dispatches on the observed object's class: layers report "error" and
    // output obscuring; renderers report only "error". KVO delivers NSNull
    // when a value becomes nil, which is treated as "no error".
351 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
352 {
353     UNUSED_PARAM(context);
354     UNUSED_PARAM(keyPath);
355     ASSERT(_parent);
356
357     if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
358         RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
359         ASSERT(_layers.contains(layer.get()));
360
361         if ([keyPath isEqualToString:@"error"]) {
362             RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
363             if ([error isKindOfClass:[NSNull class]])
364                 return;
365
366             callOnMainThread([parent = _parent, layer = WTFMove(layer), error = WTFMove(error)] {
367                 if (parent)
368                     parent->layerDidReceiveError(layer.get(), error.get());
369             });
370         } else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"]) {
371             callOnMainThread([parent = _parent, obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
372                 if (parent)
373                     parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
374             });
375         } else
376             ASSERT_NOT_REACHED();
377
378     } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
379         ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
380         RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
381         ALLOW_NEW_API_WITHOUT_GUARDS_END
382         RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
383         if ([error isKindOfClass:[NSNull class]])
384             return;
385
386         ASSERT(_renderers.contains(renderer.get()));
387         ASSERT([keyPath isEqualToString:@"error"]);
388
389         callOnMainThread([parent = _parent, renderer = WTFMove(renderer), error = WTFMove(error)] {
390             if (parent)
391                 parent->rendererDidReceiveError(renderer.get(), error.get());
392         });
393     } else
394         ASSERT_NOT_REACHED();
395 }
396
    // Handler for AVSampleBufferDisplayLayerFailedToDecodeNotification;
    // ignores notifications for layers this listener is not observing.
397 - (void)layerFailedToDecode:(NSNotification*)note
398 {
399     RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
400     if (!_layers.contains(layer.get()))
401         return;
402
403     callOnMainThread([parent = _parent, layer = WTFMove(layer), error = retainPtr([[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey])] {
404         if (parent)
405             parent->layerDidReceiveError(layer.get(), error.get());
406     });
407 }
408 @end
409
410 namespace WebCore {
411 using namespace PAL;
412
413 #pragma mark -
414 #pragma mark MediaDescriptionAVFObjC
415
// MediaDescription backed by an AVAssetTrack: captures the track's media
// characteristics and the four-character codec code of its first format
// description at construction time.
416 class MediaDescriptionAVFObjC final : public MediaDescription {
417 public:
418     static Ref<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(*new MediaDescriptionAVFObjC(track)); }
419     virtual ~MediaDescriptionAVFObjC() { }
420
421     AtomicString codec() const override { return m_codec; }
422     bool isVideo() const override { return m_isVideo; }
423     bool isAudio() const override { return m_isAudio; }
424     bool isText() const override { return m_isText; }
425     
426 protected:
427     MediaDescriptionAVFObjC(AVAssetTrack* track)
428         : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
429         , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
430         , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
431     {
        // Only the first format description is consulted; tracks with no
        // format descriptions leave m_codec null.
432         NSArray* formatDescriptions = [track formatDescriptions];
433         CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
434         if (description) {
            // Reinterprets the FourCharCode's 4 bytes as Latin-1 characters.
            // NOTE(review): the resulting character order depends on the
            // host byte representation of FourCharCode — presumably
            // intentional; confirm against other codec-string consumers.
435             FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
436             m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
437         }
438     }
439
440     AtomicString m_codec;
441     bool m_isVideo;
442     bool m_isAudio;
443     bool m_isText;
444 };
445
446 #pragma mark -
447 #pragma mark SourceBufferPrivateAVFObjC
448
449 static HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>& sourceBufferMap()
450 {
451     static NeverDestroyed<HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>> map;
452     return map;
453 }
454
static uint64_t nextMapID()
{
    // Monotonically increasing ID generator. The first ID handed out is 1,
    // so 0 can serve as the "no ID" sentinel checked in
    // bufferWasConsumedCallback().
    static uint64_t lastIssuedID = 0;
    lastIssuedID += 1;
    return lastIssuedID;
}
460
461 static void bufferWasConsumedCallback(CMNotificationCenterRef, const void* listener, CFStringRef notificationName, const void*, CFTypeRef)
462 {
463     if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
464         return;
465
466     if (!isMainThread()) {
467         callOnMainThread([notificationName, listener] {
468             bufferWasConsumedCallback(nullptr, listener, notificationName, nullptr, nullptr);
469         });
470         return;
471     }
472
473     uint64_t mapID = reinterpret_cast<uint64_t>(listener);
474     if (!mapID) {
475         RELEASE_LOG(MediaSource, "bufferWasConsumedCallback - ERROR: didn't find ID %llu in map", mapID);
476         return;
477     }
478
479     if (auto sourceBuffer = sourceBufferMap().get(mapID).get())
480         sourceBuffer->bufferWasConsumed();
481 }
482
// Factory: the only way to construct a SourceBufferPrivateAVFObjC; returns
// a reference-counted instance owned by the caller.
483 Ref<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
484 {
485     return adoptRef(*new SourceBufferPrivateAVFObjC(parent));
486 }
487
// Creates the stream data parser, its delegate, and the error listener, and
// registers this buffer in the global map under a fresh map ID.
488 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
489     : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
490     , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
491     , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:createWeakPtr()]))
492     , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
493     , m_mediaSource(parent)
494     , m_mapID(nextMapID())
495 #if !RELEASE_LOG_DISABLED
496     , m_logger(parent->logger())
497     , m_logIdentifier(parent->nextSourceBufferLogIdentifier())
498 #endif
499 {
500     ALWAYS_LOG(LOGIDENTIFIER);
501     
    // When the preroll-decode SPI is unavailable, fall back to the legacy
    // CMNotificationCenter buffer-consumed path. The listener is keyed by
    // m_mapID cast to a pointer (not by `this`); removal must use the same
    // value.
502     if (![getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)])
503         CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), reinterpret_cast<void*>(m_mapID), bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
504     
505     m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
506
507     sourceBufferMap().add(m_mapID, makeWeakPtr(*this));
508 }
509
510 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
511 {
512     ALWAYS_LOG(LOGIDENTIFIER);
513
514     ASSERT(!m_client);
515     sourceBufferMap().remove(m_mapID);
516     destroyParser();
517     destroyRenderers();
518
519     if (![getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)])
520         CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
521
522     if (m_hasSessionSemaphore)
523         m_hasSessionSemaphore->signal();
524 }
525
// Main-thread handler for a parsed initialization segment: validates
// hardware decode requirements, rebuilds the track lists, and reports the
// new initialization segment to the client.
526 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
527 {
528     ALWAYS_LOG(LOGIDENTIFIER);
529
530     if (!m_mediaSource)
531         return;
532
    // Reject the whole segment if any track fails the hardware decode
    // policy; the append will then complete as ParsingFailed.
533     if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
534         for (AVAssetTrack *track in [asset tracks]) {
535             if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
536                 m_parsingSucceeded = false;
537                 return;
538             }
539         }
540     }
541
542     m_asset = asset;
543
544     m_videoTracks.clear();
545     m_audioTracks.clear();
546
    // A fresh initialization segment re-enables sample delivery after a
    // resetParserState().
547     m_discardSamplesUntilNextInitializationSegment = false;
548
549     SourceBufferPrivateClient::InitializationSegment segment;
550
    // Prefer the overall duration hint when the (optional) selector exists;
    // fall back to the asset duration if the hint is invalid or zero.
551     if ([m_asset respondsToSelector:@selector(overallDurationHint)])
552         segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);
553
554     if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
555         segment.duration = PAL::toMediaTime([m_asset duration]);
556
557     for (AVAssetTrack* track in [m_asset tracks]) {
558         if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
559             // FIXME(125161): Handle in-band text tracks.
560             continue;
561         }
562
563         if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
564             SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
565             auto videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
566             info.track = videoTrack.copyRef();
567             m_videoTracks.append(WTFMove(videoTrack));
568             info.description = MediaDescriptionAVFObjC::create(track);
569             segment.videoTracks.append(info);
570         } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
571             SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
572             auto audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
573             info.track = audioTrack.copyRef();
574             m_audioTracks.append(WTFMove(audioTrack));
575             info.description = MediaDescriptionAVFObjC::create(track);
576             segment.audioTracks.append(info);
577         }
578
579         // FIXME(125161): Add TextTrack support
580     }
581
582     if (m_mediaSource)
583         m_mediaSource->player()->characteristicsChanged();
584
585     if (m_client)
586         m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
587 }
588
// Main-thread handler for a parser failure: records it so appendCompleted()
// reports ParsingFailed to the client.
589 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
590 {
    // NOTE(review): this guard is LOG_DISABLED but ERROR_LOG below belongs
    // to the release-logging family — confirm the guard matches the
    // configuration in which ERROR_LOG compiles to nothing.
591 #if LOG_DISABLED
592     UNUSED_PARAM(error);
593 #endif
594     ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);
595
596     m_parsingSucceeded = false;
597 }
598
// Plain aggregate bundling the arguments of processCodedFrame().
// NOTE(review): not referenced anywhere in this portion of the file —
// possibly vestigial or used further down; verify before removing.
599 struct ProcessCodedFrameInfo {
600     SourceBufferPrivateAVFObjC* sourceBuffer;
601     int trackID;
602     const String& mediaType;
603 };
604
// Main-thread entry for a parsed media sample; the parser output flags
// (the unnamed `unsigned` parameter) are ignored.
605 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
606 {
607     processCodedFrame(trackID, sampleBuffer, mediaType);
608 }
609
// Wraps an incoming CMSampleBuffer as a MediaSample and hands it to the
// client. Returns true if the sample was delivered, false if it was
// dropped (unknown track, or still discarding after a parser reset).
610 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
611 {
612     if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
613         // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
614         // will just confuse its state. Drop this sample until we can handle text tracks properly.
615         return false;
616     }
617
    // After resetParserState(), samples are dropped until the next
    // initialization segment arrives.
618     if (m_discardSamplesUntilNextInitializationSegment)
619         return false;
620
621     if (m_client) {
622         Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
623         DEBUG_LOG(LOGIDENTIFIER, mediaSample.get());
624         m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
625     }
626
627     return true;
628 }
629
// End-of-track callback from the parser; intentionally unimplemented.
630 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
631 {
632     notImplemented();
633 }
634
// Main-thread half of the synchronous "will provide content key request"
// delegate callback: associates the parser with the current CDM session
// (or the player's AVStreamSession) before the blocked parser queue is
// released by the caller's semaphore.
635 void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
636 {
637     if (!m_mediaSource)
638         return;
639
640     ASSERT(m_parser);
641
642 #if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
643     ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);
644
645     m_protectedTrackID = trackID;
646
647     if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
648         session->addParser(m_parser.get());
649     else if (!CDMSessionAVContentKeySession::isAvailable()) {
        // No CDM session and no AVContentKeySession support: attach the
        // parser directly to the player's stream session.
650         BEGIN_BLOCK_OBJC_EXCEPTIONS;
651         [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
652         END_BLOCK_OBJC_EXCEPTIONS;
653     }
654 #else
655     UNUSED_PARAM(trackID);
656 #endif
657 }
658
// Main-thread half of the blocking "did provide content key request init
// data" callback. The parser queue stays blocked on hasSessionSemaphore
// until a CDM session/key becomes available (or abort() fires); this
// method either signals it immediately or stashes it for later.
659 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
660 {
661     if (!m_mediaSource)
662         return;
663
664 #if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
665     ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);
666
667     m_protectedTrackID = trackID;
668     auto initDataArray = Uint8Array::create([initData length]);
669     [initData getBytes:initDataArray->data() length:initDataArray->length()];
670     m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.ptr());
671     if (auto session = m_mediaSource->player()->cdmSession()) {
        // A session already exists: attach the parser and release the
        // blocked parser queue right away.
672         session->addParser(m_parser.get());
673         hasSessionSemaphore->signal();
674         return;
675     }
676 #endif
677
    // Release any semaphore from a previous request before replacing it, so
    // the earlier blocked append cannot be stranded.
678     if (m_hasSessionSemaphore)
679         m_hasSessionSemaphore->signal();
680     m_hasSessionSemaphore = hasSessionSemaphore;
681     
682 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
683     auto initDataBuffer = SharedBuffer::create(initData);
684     auto keyIDs = CDMPrivateFairPlayStreaming::extractKeyIDsSinf(initDataBuffer);
685     if (!keyIDs)
686         return;
687
688     if (m_cdmInstance) {
689         if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(keyIDs.value())) {
690             [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
691             if (m_hasSessionSemaphore) {
692                 m_hasSessionSemaphore->signal();
693                 m_hasSessionSemaphore = nullptr;
694             }
695             m_waitingForKey = false;
696             return;
697         }
698     }
699
    // No matching session yet: surface the init data to the page and mark
    // this buffer as waiting for a key.
700     m_keyIDs = WTFMove(keyIDs.value());
701     m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
702
703     m_waitingForKey = true;
704     m_mediaSource->player()->waitingForKeyChanged();
705 #endif
706
    // Silence unused-parameter warnings when both feature branches above
    // compile out.
707     UNUSED_PARAM(initData);
708     UNUSED_PARAM(trackID);
709     UNUSED_PARAM(hasSessionSemaphore);
710 }
711
// Sets the client that receives initialization segments, samples, and
// append-complete notifications. NOTE(review): stored as a raw pointer —
// presumably the caller clears it before destruction (the destructor
// asserts !m_client); confirm against the owning SourceBuffer.
712 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
713 {
714     m_client = client;
715 }
716
static dispatch_queue_t globalDataParserQueue()
{
    // All AVStreamDataParser appends, across every SourceBufferPrivateAVFObjC
    // instance, share this one lazily-created concurrent queue.
    static dispatch_queue_t parserQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
726
// Copies the incoming bytes into an NSData and hands them to the parser on
// the shared parser queue. The dispatch group tracks in-flight appends so
// abort() can wait for them; completion is reported back on the main
// thread via appendCompleted().
727 void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
728 {
729     DEBUG_LOG(LOGIDENTIFIER, "data length = ", data.size());
730
731     // FIXME: Avoid the data copy by wrapping around the Vector<> object.
732     RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
733     WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
    // The parser and delegate are retained by the block so they survive
    // even if this object is destroyed mid-append.
734     RetainPtr<AVStreamDataParser> parser = m_parser;
735     RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
736
737     m_parsingSucceeded = true;
738     dispatch_group_enter(m_isAppendingGroup.get());
739
740     dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
        // A reset since the last append marks this data as a discontinuity.
741         if (parserStateWasReset)
742             [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
743         else
744             [parser appendStreamData:nsData.get()];
745
746         callOnMainThread([weakThis] {
747             if (weakThis)
748                 weakThis->appendCompleted();
749         });
750         dispatch_group_leave(isAppendingGroup.get());
751     });
752     m_parserStateWasReset = false;
753 }
754
755 void SourceBufferPrivateAVFObjC::appendCompleted()
756 {
757     if (m_parsingSucceeded && m_mediaSource)
758         m_mediaSource->player()->setLoadingProgresssed(true);
759
760     if (m_client)
761         m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
762 }
763
// Cancels all outstanding append operations: unblocks the parser queue,
// waits for in-flight appends to drain, and revokes pending completion
// callbacks so they never reach the client.
764 void SourceBufferPrivateAVFObjC::abort()
765 {
766     ALWAYS_LOG(LOGIDENTIFIER);
767
768     // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
769     // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
770     // semaphore, the m_isAppendingGroup wait operation will deadlock.
771     if (m_hasSessionSemaphore)
772         m_hasSessionSemaphore->signal();
773     m_delegate.get().abortSemaphore->signal();
774     dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    // Revoking invalidates the weak pointers captured by pending
    // appendCompleted() dispatches; then re-arm the delegate with a fresh
    // weak pointer and abort semaphore for subsequent appends.
775     m_appendWeakFactory.revokeAll();
776     m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
777     m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
778 }
779
void SourceBufferPrivateAVFObjC::resetParserState()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Drop samples parsed from any partially-appended media until a fresh
    // initialization segment arrives, and mark the next append so it is fed to
    // the parser with a discontinuity flag.
    m_discardSamplesUntilNextInitializationSegment = true;
    m_parserStateWasReset = true;
}
787
// Detaches the stream parser from any decryption machinery and releases both
// the parser and its delegate. Called when leaving the media source.
void SourceBufferPrivateAVFObjC::destroyParser()
{
#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Legacy EME: the stream session must stop feeding keys to this parser.
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
#endif
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Modern EME: remove the parser as a content-key recipient from the
    // session matching the key IDs seen in this buffer's content.
    if (m_cdmInstance) {
        if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs))
            [instanceSession->contentKeySession() removeContentKeyRecipient:m_parser.get()];
    }
#endif

    // Invalidate the delegate first so no further parser callbacks arrive.
    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
805
// Tears down the display layer, decompression session, and every audio
// renderer, and invalidates the shared error listener.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    for (auto& renderer : m_audioRenderers.values()) {
        // Detach from the player before flushing so it stops tracking this
        // renderer's sample availability.
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    [m_errorListener invalidate];
    m_errorListener = nullptr;

    m_audioRenderers.clear();
}
827
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Tear down parsing and rendering state before detaching from the source.
    destroyParser();
    destroyRenderers();

    if (!m_mediaSource)
        return;
    m_mediaSource->removeSourceBuffer(this);
}
838
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    // Without a media source there is no player to consult.
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
843
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    ALWAYS_LOG(LOGIDENTIFIER, readyState);

    // Ready state lives on the player; a detached buffer has nowhere to forward it.
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setReadyState(readyState);
}
851
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    // The client owns the parsed track inventory.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasVideo();
}
856
// True when a video track is currently selected; -1 is the "none" sentinel
// (see trackDidChangeEnabled()).
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    return m_enabledVideoTrackID != -1;
}
861
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    // The client owns the parsed track inventory.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasAudio();
}
866
// Called when a video track's selected state changes. Routes sample delivery
// for the track to (or away from) the parser and decompression session, then
// tells the media source the selected-video state may have changed.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "video trackID = ", trackID, ", selected = ", track->selected());

    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // The currently-enabled track was deselected: stop sample delivery.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // Guard against a cleared media source, as every other member function
    // does; previously this was dereferenced unconditionally.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
892
// Called when an audio track's enabled state changes. Creates (or reuses) an
// AVSampleBufferAudioRenderer for the track and attaches or detaches it from
// the player accordingly.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "audio trackID = ", trackID, ", selected = ", track->enabled());

    if (!track->enabled()) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        // The renderer is kept in m_audioRenderers so it can be reattached if
        // the track is re-enabled later.
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            // First enable for this track: create a renderer and have it call
            // back on the main queue whenever it can accept more samples.
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            auto weakThis = createWeakPtr();
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
927
// Associates this buffer with a legacy EME CDM session. Unblocks the parser
// queue if it was waiting for a session and replays any deferred HDCP error.
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = makeWeakPtr(session);

    if (m_session) {
        m_session->addSourceBuffer(this);
        // The parser queue may be blocked in append waiting for a session;
        // release it now that one is available.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }

        // An HDCP error that arrived before any session existed (see
        // rendererDidReceiveError()) is delivered to the new session now.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
963
// Associates this buffer with a modern EME CDM instance and immediately
// attempts decryption in case key IDs are already known.
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // NOTE(review): assumes any instance passed here is a
    // CDMInstanceFairPlayStreamingAVFObjC — confirm callers guarantee this.
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (fpsInstance == m_cdmInstance)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_cdmInstance = fpsInstance;
    attemptToDecrypt();
#else
    UNUSED_PARAM(instance);
#endif
}
979
// If a CDM instance, key IDs, and an outstanding key request are all present,
// attaches the parser to the matching content key session and unblocks parsing.
void SourceBufferPrivateAVFObjC::attemptToDecrypt()
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (!m_cdmInstance || m_keyIDs.isEmpty() || !m_waitingForKey)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs);
    if (!instanceSession)
        return;

    [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
    // The parser queue may be blocked waiting for a key session; release it.
    if (m_hasSessionSemaphore) {
        m_hasSessionSemaphore->signal();
        m_hasSessionSemaphore = nullptr;
    }
    m_waitingForKey = false;
#endif
}
998
void SourceBufferPrivateAVFObjC::flush()
{
    // Flush every rendering path: video first, then each audio renderer.
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
1006
// Adds a client to be consulted when layer/renderer errors arrive. A client
// may be registered at most once.
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
1012
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // The client must have been registered previously; removal is positional.
    ASSERT(m_errorClients.contains(client));
    size_t position = m_errorClients.find(client);
    m_errorClients.remove(position);
}
1018
// Handles an error reported by the display layer: registered clients get a
// chance to claim it; otherwise it is forwarded as a rendering error.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // If any client asks to ignore the error, suppress the notification below.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->layerDidReceiveError(layer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;

    // Extract the underlying OSStatus (intValue yields 0 if the key is absent).
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
1038
// Called when external-protection (e.g. HDCP) status changes. With a modern
// EME CDM attached the media source handles it; otherwise it is surfaced as a
// synthesized layer error.
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_mediaSource && m_mediaSource->cdmInstance()) {
        m_mediaSource->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    ERROR_LOG(LOGIDENTIFIER, obscured);

    // 'HDCP' is the four-character code recognized by rendererDidReceiveError().
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
1055
// Handles an error reported by an audio renderer. HDCP errors are remembered
// so they can be replayed when a CDM session attaches (see setCDMSession()).
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    // NOTE(review): this early return is currently a no-op (nothing follows);
    // it mirrors layerDidReceiveError() and guards any future additions.
    if (anyIgnored)
        return;
}
1075
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    DEBUG_LOG(LOGIDENTIFIER, trackID);

    // The enabled video track flushes the video path; any other known track
    // flushes its audio renderer. Unknown tracks are ignored.
    if (trackID == m_enabledVideoTrackID) {
        flushVideo();
        return;
    }

    if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1086
// Discards all enqueued-but-undisplayed video, clears cached size state, and
// re-arms the "has available video frame" notification.
void SourceBufferPrivateAVFObjC::flushVideo()
{
    DEBUG_LOG(LOGIDENTIFIER);
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        // Re-register for the next decoded frame; flushing discards any
        // previously pending notification.
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Natural size must be recomputed from the next enqueued sample.
    m_cachedSize = WTF::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1107
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Drop queued audio and note that this renderer no longer holds samples.
    [renderer flush];

    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1117
// Hands a parsed sample to the appropriate renderer: the display layer and/or
// decompression session for the enabled video track, or the track's audio
// renderer. Also tracks natural-size changes and "first frame available" state.
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    DEBUG_LOG(LOGIDENTIFIER, "track ID = ", trackID, ", sample = ", sample.get());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes from the sample's format description.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            DEBUG_LOG(LOGIDENTIFIER, "size changed to ", formatSize);
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                // The first size is applied immediately; later changes are
                // scheduled at the sample's presentation time.
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (!m_displayLayer)
            return;

        if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
            DEBUG_LOG(LOGIDENTIFIER, "adding buffer attachment");

            // Prefer the -prerollDecodeWithCompletionHandler: SPI when it is
            // available; otherwise fall back to a consumed-notification
            // attachment on a copy of the sample buffer.
            bool havePrerollDecodeWithCompletionHandler = [getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)];

            if (!havePrerollDecodeWithCompletionHandler) {
                // Initialize the out-pointer and check the result; previously a
                // failed CMSampleBufferCreateCopy() left rawSampleCopy
                // uninitialized and adoptCF()/CMSetAttachment() operated on a
                // garbage pointer.
                CMSampleBufferRef rawSampleCopy = nullptr;
                if (CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy) == noErr) {
                    auto sampleCopy = adoptCF(rawSampleCopy);
                    CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{ (__bridge NSString *)kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed : @(YES) }, kCMAttachmentMode_ShouldNotPropagate);
                    [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
                } else {
                    // Copy failed: enqueue the original sample without the
                    // consumed notification rather than dropping the frame.
                    [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
                }
#if PLATFORM(IOS_FAMILY)
                m_mediaSource->player()->setHasAvailableVideoFrame(true);
#endif
            } else {
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
                [m_displayLayer prerollDecodeWithCompletionHandler:[weakThis = createWeakPtr()] (BOOL success) mutable {
                    if (!success || !weakThis)
                        return;

                    // The completion handler may run off the main thread.
                    callOnMainThread([weakThis = WTFMove(weakThis)] () mutable {
                        if (!weakThis)
                            return;

                        weakThis->bufferWasConsumed();
                    });
                }];
            }
        } else
            [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];

    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1189
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    DEBUG_LOG(LOGIDENTIFIER);

    // A consumed sample means the first video frame has been displayed.
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1197
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    // Video readiness is reported by the decompression session when one is
    // active, otherwise by the display layer.
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            return m_decompressionSession->isReadyForMoreMediaData();
        return [m_displayLayer isReadyForMoreMediaData];
    }

    // Audio readiness comes from the track's renderer, if one exists.
    if (!m_audioRenderers.contains(trackID))
        return false;
    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1213
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    ALWAYS_LOG(LOGIDENTIFIER, isActive);
    // Active-state changes are only meaningful while attached to a source.
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1220
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    // The client knows the buffered ranges; with no client the requested time
    // is the best available answer.
    if (m_client)
        return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold);
    return time;
}
1227
// Called before a seek; flushing prevents stale samples from rendering at the
// new playback position.
void SourceBufferPrivateAVFObjC::willSeek()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    flush();
}
1233
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    // Size cached from the most recent video format description, if any.
    if (m_cachedSize)
        return m_cachedSize.value();
    return { };
}
1238
// A renderer for trackID can accept more samples: stop the readiness
// callbacks and ask the client for more data. Unknown track IDs are ignored.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    INFO_LOG(LOGIDENTIFIER, trackID);

    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else
        return;

    // The client will re-arm the callbacks via notifyClientWhenReadyForMoreSamples().
    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1255
// Arms "ready for more media data" callbacks for the given track; they land
// in didBecomeReadyForMoreSamples() on the main queue.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        // Video: the decompression session and the display layer (when
        // present) can each signal readiness independently.
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            auto weakThis = createWeakPtr();
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        auto weakThis = createWeakPtr();
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1280
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    ALWAYS_LOG(LOGIDENTIFIER, contentType);

    // A type is switchable when the media-source player engine supports it at all.
    MediaEngineSupportParameters parameters;
    parameters.isMediaSource = true;
    parameters.type = contentType;
    auto support = MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters);
    return support != MediaPlayer::IsNotSupported;
}
1290
// Swaps the AVSampleBufferDisplayLayer used for video output: flushes and
// detaches the old layer, then wires up the new one and asks the client to
// re-enqueue samples for the enabled video track.
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    // A layer should only coexist with a decompression session while a video
    // track is actually selected.
    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        auto weakThis = createWeakPtr();
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1317
// Swaps the WebCoreDecompressionSession used for layerless video decoding:
// invalidates the old session, then registers ready/frame callbacks on the
// new one and asks the client to re-enqueue samples.
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1347
#if !RELEASE_LOG_DISABLED
// All logging from this class goes to the MediaSource channel.
WTFLogChannel& SourceBufferPrivateAVFObjC::logChannel() const
{
    return LogMediaSource;
}
#endif
1354
1355 }
1356
1357 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)