WeakPtr breaks vtables when upcasting to base classes
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMFairPlayStreaming.h"
34 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
35 #import "CDMSessionAVContentKeySession.h"
36 #import "CDMSessionMediaSourceAVFObjC.h"
37 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "Logging.h"
39 #import "MediaDescription.h"
40 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
41 #import "MediaSample.h"
42 #import "MediaSampleAVFObjC.h"
43 #import "MediaSourcePrivateAVFObjC.h"
44 #import "NotImplemented.h"
45 #import "SharedBuffer.h"
46 #import "SourceBufferPrivateClient.h"
47 #import "TimeRanges.h"
48 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
49 #import "WebCoreDecompressionSession.h"
50 #import <AVFoundation/AVAssetTrack.h>
51 #import <JavaScriptCore/TypedArrayInlines.h>
52 #import <QuartzCore/CALayer.h>
53 #import <objc/runtime.h>
54 #import <pal/avfoundation/MediaTimeAVFoundation.h>
55 #import <pal/spi/mac/AVFoundationSPI.h>
56 #import <wtf/BlockObjCExceptions.h>
57 #import <wtf/HashCountedSet.h>
58 #import <wtf/MainThread.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WTFSemaphore.h>
61 #import <wtf/WeakPtr.h>
62 #import <wtf/text/AtomicString.h>
63 #import <wtf/text/CString.h>
64
65 #pragma mark - Soft Linking
66
67 #import <pal/cf/CoreMediaSoftLink.h>
68 #import <pal/cocoa/AVFoundationSoftLink.h>
69
// Declares the private preroll-decode SPI on AVSampleBufferDisplayLayer so it
// can be feature-tested with -instancesRespondToSelector: elsewhere in this file.
@interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerQueueManagementPrivate)
- (void)prerollDecodeWithCompletionHandler:(void (^)(BOOL success))block;
@end
73
74 #pragma mark -
75 #pragma mark AVStreamSession
76
// Minimal local declaration of the AVStreamSession SPI methods this file uses
// to attach/detach stream data parsers for legacy encrypted-media support.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
81
82 #pragma mark -
83 #pragma mark WebAVStreamDataParserListener
84
// Bridges AVStreamDataParser delegate callbacks (delivered on a background
// parser queue) to a weakly-held SourceBufferPrivateAVFObjC on the main thread.
// The properties use `assign` because WeakPtr and Box are C++ value types, not
// Objective-C objects, so retain/copy semantics do not apply.
@interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    Box<Semaphore> _abortSemaphore;
    AVStreamDataParser* _parser;
}
@property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
@property (assign) Box<Semaphore> abortSemaphore;
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
@end
94
@implementation WebAVStreamDataParserListener
// Designated initializer; registers self as the parser's delegate. The parent
// is held weakly so callbacks arriving after its destruction become no-ops.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

@synthesize parent=_parent;
@synthesize abortSemaphore=_abortSemaphore;

// Non-ARC file: detach from the parser before deallocation so it cannot
// message a dead delegate.
- (void)dealloc
{
    [_parser setDelegate:nil];
    [super dealloc];
}

// Detaches from the parser without waiting for dealloc; called when the owning
// SourceBufferPrivateAVFObjC destroys its parser.
- (void)invalidate
{
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// The delegate callbacks below arrive on the background parser queue and bounce
// their (retained) payloads to the main thread, guarded by the weak parent.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

// Variant carrying a discontinuity flag; the flag is deliberately ignored and
// both variants funnel into the same parent call.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    UNUSED_PARAM(discontinuity);
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<NSError> protectedError = error;
    callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
        if (parent)
            parent->didFailToParseStreamDataWithError(protectedError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<CMSampleBufferRef> protectedSample = sample;
    callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
        if (parent)
            parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
        if (parent)
            parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

IGNORE_WARNINGS_BEGIN("deprecated-implementations")
- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
IGNORE_WARNINGS_END
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
        if (parent)
            parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
        respondedSemaphore->signal();
    });

    // Poll both semaphores so an abort() on the main thread can release this
    // otherwise-blocked parser-queue thread instead of deadlocking it.
    while (true) {
        if (respondedSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            // Re-signal so any other waiter observing the abort also wakes.
            _abortSemaphore->signal();
            return;
        }
    }
}

IGNORE_WARNINGS_BEGIN("deprecated-implementations")
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
IGNORE_WARNINGS_END
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // Block the parser queue until the main thread has attached a CDM session
    // (or the append is aborted); same polling rationale as the method above.
    Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
        if (parent)
            parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
    });

    while (true) {
        if (hasSessionSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            _abortSemaphore->signal();
            return;
        }
    }
}
@end
228
// KVO/notification observer for AVSampleBufferDisplayLayer and
// AVSampleBufferAudioRenderer error states; forwards observed errors to the
// weakly-held SourceBufferPrivateAVFObjC on the main thread.
@interface WebAVSampleBufferErrorListener : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
    ALLOW_NEW_API_WITHOUT_GUARDS_END
}

- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
ALLOW_NEW_API_WITHOUT_GUARDS_END
@end
246
@implementation WebAVSampleBufferErrorListener

// Designated initializer; the parent is held weakly so this listener cannot
// keep the SourceBufferPrivateAVFObjC alive.
- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = WTFMove(parent);
    return self;
}

// Non-ARC file: tear down all KVO/notification registrations before dealloc.
- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Idempotent teardown: removes every observation added by the beginObserving*
// methods below and drops the parent reference. The early return makes a
// second call (e.g. dealloc after an explicit invalidate) a no-op.
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

// Starts KVO on the layer's "error" and external-protection state, plus the
// failed-to-decode notification. Must be balanced by stopObservingLayer:.
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

// Reverses beginObservingLayer: for a single layer.
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

// Starts KVO on the renderer's "error" key. Must be balanced by
// stopObservingRenderer:.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
ALLOW_NEW_API_WITHOUT_GUARDS_END
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

// Reverses beginObservingRenderer: for a single renderer.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO sink: distinguishes layer vs. renderer by class, filters NSNull change
// values, and forwards the error to the parent on the main thread (KVO may
// deliver on an arbitrary thread).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if ([object isKindOfClass:PAL::getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            if ([error isKindOfClass:[NSNull class]])
                return;

            callOnMainThread([parent = _parent, layer = WTFMove(layer), error = WTFMove(error)] {
                if (parent)
                    parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"]) {
            callOnMainThread([parent = _parent, obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
                if (parent)
                    parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
            });
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:PAL::getAVSampleBufferAudioRendererClass()]) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
        if ([error isKindOfClass:[NSNull class]])
            return;

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualToString:@"error"]);

        callOnMainThread([parent = _parent, renderer = WTFMove(renderer), error = WTFMove(error)] {
            if (parent)
                parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

// Handler for AVSampleBufferDisplayLayerFailedToDecodeNotification; ignores
// layers this listener is not observing, then reports on the main thread.
- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    if (!_layers.contains(layer.get()))
        return;

    callOnMainThread([parent = _parent, layer = WTFMove(layer), error = retainPtr([[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey])] {
        if (parent)
            parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
387
388 namespace WebCore {
389 using namespace PAL;
390
391 #pragma mark -
392 #pragma mark MediaDescriptionAVFObjC
393
// MediaDescription backed by an AVAssetTrack: snapshots the track's media
// characteristics and the codec FourCC of its first format description at
// construction time.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static Ref<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(*new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    AtomicString codec() const override { return m_codec; }
    bool isVideo() const override { return m_isVideo; }
    bool isAudio() const override { return m_isAudio; }
    bool isText() const override { return m_isText; }

protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // Reinterpret the 4-byte media subtype as a 4-character Latin-1
            // string. NOTE(review): the resulting character order depends on
            // FourCharCode's in-memory byte order — confirm it yields the
            // conventional spelling (e.g. "avc1") on all supported targets.
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;   // Empty when the track has no format description.
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
423
424 #pragma mark -
425 #pragma mark SourceBufferPrivateAVFObjC
426
// Process-wide registry mapping each buffer's numeric ID to a weak reference
// to its SourceBufferPrivateAVFObjC, so the CoreMedia notification callback
// can recover the object from an opaque listener token.
static HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>& sourceBufferMap()
{
    static NeverDestroyed<HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>> bufferMap;
    return bufferMap.get();
}
432
// Returns a fresh, never-zero ID for sourceBufferMap() registration.
// Main-thread only, like its callers.
static uint64_t nextMapID()
{
    static uint64_t currentID;
    currentID += 1;
    return currentID;
}
438
// CoreMedia notification callback fired when an enqueued sample buffer with a
// "buffer consumed" attachment has been consumed. The listener token is the
// m_mapID of the originating SourceBufferPrivateAVFObjC (see the constructor),
// used to look the object up in sourceBufferMap(). May be delivered on an
// arbitrary thread; re-dispatches itself to the main thread before touching
// the map.
//
// Fix: the original logged "didn't find ID %llu in map" when the listener
// token was null (always printing ID 0), while an actual map miss was silent.
// The two failure modes are now reported distinctly.
static void bufferWasConsumedCallback(CMNotificationCenterRef, const void* listener, CFStringRef notificationName, const void*, CFTypeRef)
{
    if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
        return;

    if (!isMainThread()) {
        callOnMainThread([notificationName, listener] {
            bufferWasConsumedCallback(nullptr, listener, notificationName, nullptr, nullptr);
        });
        return;
    }

    uint64_t mapID = reinterpret_cast<uint64_t>(listener);
    if (!mapID) {
        RELEASE_LOG(MediaSource, "bufferWasConsumedCallback - ERROR: received null listener ID");
        return;
    }

    auto sourceBuffer = sourceBufferMap().get(mapID).get();
    if (!sourceBuffer) {
        // The source buffer may legitimately have been destroyed already.
        RELEASE_LOG(MediaSource, "bufferWasConsumedCallback - ERROR: didn't find ID %llu in map", mapID);
        return;
    }

    sourceBuffer->bufferWasConsumed();
}
460
// Factory: heap-allocates a SourceBufferPrivateAVFObjC owned by `parent` and
// adopts the initial reference.
Ref<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    auto* sourceBuffer = new SourceBufferPrivateAVFObjC(parent);
    return adoptRef(*sourceBuffer);
}
465
// Constructor: allocates the AVStreamDataParser, wires up the parser delegate
// and error listener (both hold `this` weakly), and registers this buffer in
// sourceBufferMap() under a fresh m_mapID.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([PAL::allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:makeWeakPtr(*this)]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:makeWeakPtr(*this)]))
    , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
    , m_mediaSource(parent)
    , m_mapID(nextMapID())
#if !RELEASE_LOG_DISABLED
    , m_logger(parent->logger())
    , m_logIdentifier(parent->nextSourceBufferLogIdentifier())
#endif
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // On systems without the preroll-decode SPI, fall back to CoreMedia's
    // buffer-consumed notification; m_mapID doubles as the opaque listener
    // token, so removal must pass the same value.
    if (![PAL::getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)])
        CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), reinterpret_cast<void*>(m_mapID), bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);

    // Arm the delegate's abort semaphore so blocked parser-queue waits can be
    // cancelled by abort().
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);

    sourceBufferMap().add(m_mapID, makeWeakPtr(*this));
}
487
// Destructor: unregisters from sourceBufferMap(), tears down the parser and
// renderers, removes the CoreMedia listener, and releases any blocked parser
// queue.
//
// Fix: the CMNotificationCenter listener was registered (in the constructor)
// with reinterpret_cast<void*>(m_mapID) as the opaque listener token, but was
// being removed here with |this|. The tokens never matched, so the removal
// silently failed and the registration leaked; pass the same token used at
// registration time.
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    ASSERT(!m_client);
    sourceBufferMap().remove(m_mapID);
    destroyParser();
    destroyRenderers();

    if (![PAL::getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)])
        CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), reinterpret_cast<void*>(m_mapID), bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);

    // Unblock the parser queue if it is still waiting for a CDM session.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
}
503
// Main-thread handler for a parsed initialization segment: validates hardware
// decode requirements, rebuilds the track lists, and forwards an
// InitializationSegment to the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!m_mediaSource)
        return;

    // Fail the append if any track needs hardware decode support that the
    // current configuration cannot provide.
    if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
        for (AVAssetTrack *track in [asset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
                m_parsingSucceeded = false;
                return;
            }
        }
    }

    m_asset = asset;

    m_videoTracks.clear();
    m_audioTracks.clear();

    // A new initialization segment ends the sample-discarding state entered by
    // resetParserState().
    m_discardSamplesUntilNextInitializationSegment = false;

    SourceBufferPrivateClient::InitializationSegment segment;

    // Prefer the duration hint when the asset provides one; fall back to the
    // full duration when the hint is absent, invalid, or zero.
    if ([m_asset respondsToSelector:@selector(overallDurationHint)])
        segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);

    if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
        segment.duration = PAL::toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
            // FIXME(125161): Handle in-band text tracks.
            continue;
        }

        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            auto videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack.copyRef();
            m_videoTracks.append(WTFMove(videoTrack));
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            auto audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack.copyRef();
            m_audioTracks.append(WTFMove(audioTrack));
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
}
566
// Main-thread handler for a parser failure; records it so appendCompleted()
// reports ParsingFailed to the client.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    // NOTE(review): the guard above checks LOG_DISABLED, but the file's other
    // logging guards use RELEASE_LOG_DISABLED (see the constructor) — confirm
    // which flag actually controls ERROR_LOG here.
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    m_parsingSucceeded = false;
}
576
// Bundles the arguments of a coded-frame delivery.
// NOTE(review): not referenced anywhere in the visible portion of this file —
// possibly dead code. Also holds `mediaType` by reference, so an instance must
// never outlive the String it was constructed from.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
582
// Main-thread entry point for parsed media samples; the flags argument is
// currently unused. Delegates to processCodedFrame() and ignores its result.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
{
    processCodedFrame(trackID, sampleBuffer, mediaType);
}
587
// Wraps a parsed CMSampleBuffer in a MediaSample and delivers it to the client.
// Returns false when the sample is dropped: either its track is neither the
// enabled video track nor a known audio renderer track, or samples are being
// discarded until the next initialization segment.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
        // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
        // will just confuse its state. Drop this sample until we can handle text tracks properly.
        return false;
    }

    if (m_discardSamplesUntilNextInitializationSegment)
        return false;

    if (m_client) {
        Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        DEBUG_LOG(LOGIDENTIFIER, mediaSample.get());
        m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
    }

    return true;
}
607
// End-of-track notification from the parser; intentionally unimplemented.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
{
    notImplemented();
}
612
// Called on the main thread while the parser queue blocks on a semaphore (see
// WebAVStreamDataParserListener), because the AVStreamSession must be
// associated with the parser before the delegate callback returns.
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);

    m_protectedTrackID = trackID;

    // Prefer an existing CDM session; otherwise fall back to attaching the
    // player's stream session directly (only when AVContentKeySession-based
    // sessions are unavailable).
    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
        session->addParser(m_parser.get());
    else if (!CDMSessionAVContentKeySession::isAvailable()) {
        BEGIN_BLOCK_OBJC_EXCEPTIONS;
        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
        END_BLOCK_OBJC_EXCEPTIONS;
    }
#else
    UNUSED_PARAM(trackID);
#endif
}
636
// Main-thread handler for encrypted-media initialization data. The parser
// queue is blocked on hasSessionSemaphore until a CDM session is attached to
// the parser (or the append is aborted).
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
{
    if (!m_mediaSource)
        return;

#if HAVE(AVCONTENTKEYSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);

    m_protectedTrackID = trackID;
    m_initData = Uint8Array::create([initData length]);
    [initData getBytes:m_initData->data() length:m_initData->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, m_initData.get());
    // Legacy EME path: if a session already exists, attach the parser and
    // release the waiting parser queue immediately.
    if (auto session = m_mediaSource->player()->cdmSession()) {
        session->addParser(m_parser.get());
        hasSessionSemaphore->signal();
        return;
    }
#endif

    // Signal any previously stored semaphore before replacing it so an earlier
    // waiter is not left blocked forever.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
    m_hasSessionSemaphore = hasSessionSemaphore;
    
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Modern EME path: extract FairPlay 'sinf' key IDs; attach to an existing
    // CDM instance session if one covers these keys, otherwise surface the
    // init data to the page and enter the waiting-for-key state.
    auto initDataBuffer = SharedBuffer::create(initData);
    auto keyIDs = CDMPrivateFairPlayStreaming::extractKeyIDsSinf(initDataBuffer);
    if (!keyIDs)
        return;

    if (m_cdmInstance) {
        if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(keyIDs.value())) {
            [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
            if (m_hasSessionSemaphore) {
                m_hasSessionSemaphore->signal();
                m_hasSessionSemaphore = nullptr;
            }
            m_waitingForKey = false;
            return;
        }
    }

    m_keyIDs = WTFMove(keyIDs.value());
    m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());

    m_waitingForKey = true;
    m_mediaSource->player()->waitingForKeyChanged();
#endif

    UNUSED_PARAM(initData);
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(hasSessionSemaphore);
}
689
// Sets (or clears, with nullptr) the client that receives initialization
// segments, samples, and append-completion callbacks.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
694
// Lazily creates (once, thread-safely) the concurrent dispatch queue shared by
// all source buffers for AVStreamDataParser append work.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_once_t initOnce;
    static dispatch_queue_t parserQueue;
    dispatch_once(&initOnce, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
704
// Asynchronously hands `data` to the AVStreamDataParser on the shared parser
// queue. m_isAppendingGroup tracks the in-flight append so abort() can wait
// for it; the parser, delegate, and group are captured strongly so they
// survive until the block completes even if this object is destroyed first.
void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
{
    DEBUG_LOG(LOGIDENTIFIER, "data length = ", data.size());

    m_parsingSucceeded = true;
    dispatch_group_enter(m_isAppendingGroup.get());

    dispatch_async(globalDataParserQueue(), [data = WTFMove(data), weakThis = m_appendWeakFactory.createWeakPtr(*this), parser = m_parser, delegate = m_delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] () mutable {
        auto sharedData = SharedBuffer::create(WTFMove(data));
        auto nsData = sharedData->createNSData();
        // After resetParserState(), flag the next append as a discontinuity so
        // the parser re-synchronizes on the new data.
        if (parserStateWasReset)
            [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
        else
            [parser appendStreamData:nsData.get()];

        // Completion is reported on the main thread; weakThis is revoked by
        // abort(), which drops stale completions.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->appendCompleted();
        });
        dispatch_group_leave(isAppendingGroup.get());
    });
    m_parserStateWasReset = false;
}
728
// Main-thread completion for append(): records loading progress on success and
// reports the overall result to the client.
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    if (m_mediaSource && m_parsingSucceeded)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (!m_client)
        return;

    auto result = m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed;
    m_client->sourceBufferPrivateAppendComplete(result);
}
737
// Cancels outstanding append operations: signals both semaphores the parser
// queue may be blocked on, waits for in-flight appends to drain, then revokes
// the append weak pointers so stale completion callbacks are dropped.
void SourceBufferPrivateAVFObjC::abort()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
    // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
    // semaphore, the m_isAppendingGroup wait operation will deadlock.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
    m_delegate.get().abortSemaphore->signal();
    dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    m_appendWeakFactory.revokeAll();
    // Re-arm the delegate with a fresh weak pointer and abort semaphore for
    // the next append cycle.
    m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
}
753
// MSE resetParserState(): the reset is deferred — the next append is flagged
// as a discontinuity (see append()), and incoming samples are discarded until
// a new initialization segment arrives.
void SourceBufferPrivateAVFObjC::resetParserState()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_parserStateWasReset = true;
    m_discardSamplesUntilNextInitializationSegment = true;
}
761
// Detaches this buffer's parser from any DRM machinery (stream session or
// content key session) and invalidates the delegate so no further parser
// callbacks reach this object.
void SourceBufferPrivateAVFObjC::destroyParser()
{
#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
#endif
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance) {
        if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs))
            [instanceSession->contentKeySession() removeContentKeyRecipient:m_parser.get()];
    }
#endif

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
779
// Tears down all renderers: detaches the video layer and decompression
// session, flushes and stops every audio renderer, and invalidates the error
// listener so no further KVO callbacks arrive.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    [m_errorListener invalidate];
    m_errorListener = nullptr;

    m_audioRenderers.clear();
}
801
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Release platform resources before detaching from the media source, while
    // m_mediaSource is still available to the teardown helpers.
    destroyParser();
    destroyRenderers();

    if (m_mediaSource)
        m_mediaSource->removeSourceBuffer(this);
}
812
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    // Without a media source there is no player to consult; report HaveNothing.
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
817
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    ALWAYS_LOG(LOGIDENTIFIER, readyState);

    // Forward the state change to the player while still attached.
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setReadyState(readyState);
}
825
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    // Track information lives with the client; without one, report no video.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasVideo();
}
830
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    // -1 is the sentinel used throughout this class for "no enabled video track".
    return m_enabledVideoTrackID != -1;
}
835
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    // Track information lives with the client; without one, report no audio.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasAudio();
}
840
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "video trackID = ", trackID, ", selected = ", track->selected());

    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // The currently-enabled track was deselected; stop consuming its data.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        // A track became selected; route its samples to the decompression
        // session's readiness callback.
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // Null-check m_mediaSource before notifying it: this callback can arrive
    // after removedFromMediaSource() has cleared the pointer, and every other
    // use of m_mediaSource in this class is guarded the same way.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
866
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "audio trackID = ", trackID, ", selected = ", track->enabled());

    if (!track->enabled()) {
        // Track disabled: stop feeding it data and detach its renderer from
        // the player. The renderer stays in m_audioRenderers for reuse.
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            // First time this track is enabled: create a renderer and wire up
            // its readiness callback. The block holds a WeakPtr so a destroyed
            // buffer is not called back.
            renderer = adoptNS([PAL::allocAVSampleBufferAudioRendererInstance() init]);
            auto weakThis = makeWeakPtr(*this);
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
901
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Detach from the previous session before adopting the new one.
    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = makeWeakPtr(session);

    if (m_session) {
        m_session->addSourceBuffer(this);
        // A session is now available; unblock any append waiting on one.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }

        // Replay a previously-recorded HDCP error (see rendererDidReceiveError())
        // to the new session, asynchronously so attachment completes first.
        if (m_hdcpError) {
            callOnMainThread([weakThis = makeWeakPtr(*this)] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
936
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (fpsInstance == m_cdmInstance)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_cdmInstance = fpsInstance;
    // The new instance may already hold the keys this buffer is waiting for.
    attemptToDecrypt();
#else
    UNUSED_PARAM(instance);
#endif
}
952
void SourceBufferPrivateAVFObjC::attemptToDecrypt()
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Nothing to do until we have a CDM instance, key IDs from the content,
    // and a pending wait for a key.
    if (!m_cdmInstance || m_keyIDs.isEmpty() || !m_waitingForKey)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs);
    if (!instanceSession)
        return;

    // Let the key session feed keys to the parser, and unblock any append
    // that was waiting on a session (see the semaphore handling elsewhere).
    [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
    if (m_hasSessionSemaphore) {
        m_hasSessionSemaphore->signal();
        m_hasSessionSemaphore = nullptr;
    }
    m_waitingForKey = false;
#endif
}
971
void SourceBufferPrivateAVFObjC::flush()
{
    // Flush the video path first, then every attached audio renderer.
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
979
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // Clients must not register twice; they are kept in a flat list.
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
985
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // The client must have registered previously.
    ASSERT(m_errorClients.contains(client));
    m_errorClients.remove(m_errorClients.find(client));
}
991
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Give every registered error client a chance to claim (and suppress) the error.
    bool errorWasIgnored = false;
    for (auto& errorClient : m_errorClients) {
        bool clientIgnoredError = false;
        errorClient->layerDidReceiveError(layer, error, clientIgnoredError);
        if (clientIgnoredError)
            errorWasIgnored = true;
    }
    if (errorWasIgnored)
        return;

    // Surface the underlying OSStatus code to the SourceBuffer client.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
1011
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // With a modern EME CDM instance attached, the media source handles this.
    if (m_mediaSource && m_mediaSource->cdmInstance()) {
        m_mediaSource->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    ERROR_LOG(LOGIDENTIFIER, obscured);

    // Otherwise, report loss of external protection as a synthetic 'HDCP'
    // layer error so the usual error path runs.
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
1028
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    // Remember HDCP failures so they can be replayed to a CDM session that
    // attaches later (see setCDMSession()).
    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Let registered error clients observe (and claim) the error. The trailing
    // conditional return is currently a no-op — nothing follows it — but keeps
    // this function parallel with layerDidReceiveError().
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;
}
1048
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    DEBUG_LOG(LOGIDENTIFIER, trackID);

    // Route the flush to whichever renderer owns this track.
    if (trackID == m_enabledVideoTrackID)
        flushVideo();
    else if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1059
void SourceBufferPrivateAVFObjC::flushVideo()
{
    DEBUG_LOG(LOGIDENTIFIER);
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        // Re-arm the "first frame available" notification after the flush.
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = makeWeakPtr(*this)] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Forget the cached presentation size; the next enqueued sample will
    // re-establish it (see enqueueSample()).
    m_cachedSize = WTF::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1080
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    [renderer flush];

    // A flushed renderer no longer has a decodable audio sample queued.
    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1090
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    // Ignore samples for tracks we are not currently rendering.
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    DEBUG_LOG(LOGIDENTIFIER, "track ID = ", trackID, ", sample = ", sample.get());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes so the player can resize: the very
        // first size applies immediately, later changes apply at the sample's
        // presentation time.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            DEBUG_LOG(LOGIDENTIFIER, "size changed to ", formatSize);
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (!m_displayLayer)
            return;

        // Until the player has its first displayable frame, arrange to learn
        // when this (displayable) sample is actually consumed by the layer.
        if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
            DEBUG_LOG(LOGIDENTIFIER, "adding buffer attachment");

            bool havePrerollDecodeWithCompletionHandler = [PAL::getAVSampleBufferDisplayLayerClass() instancesRespondToSelector:@selector(prerollDecodeWithCompletionHandler:)];

            if (!havePrerollDecodeWithCompletionHandler) {
                // Fallback path: attach a "post notification when consumed"
                // attachment to a copy of the sample, leaving the original
                // buffer untouched.
                CMSampleBufferRef rawSampleCopy;
                CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
                auto sampleCopy = adoptCF(rawSampleCopy);
                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{ (__bridge NSString *)kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed : @(YES) }, kCMAttachmentMode_ShouldNotPropagate);
                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
#if PLATFORM(IOS_FAMILY)
                // m_mediaSource is non-null here: this branch is guarded above.
                m_mediaSource->player()->setHasAvailableVideoFrame(true);
#endif
            } else {
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
                // The completion handler may run off the main thread; hop back
                // before touching member state via bufferWasConsumed().
                [m_displayLayer prerollDecodeWithCompletionHandler:[weakThis = makeWeakPtr(*this)] (BOOL success) mutable {
                    if (!success || !weakThis)
                        return;

                    callOnMainThread([weakThis = WTFMove(weakThis)] () mutable {
                        if (!weakThis)
                            return;

                        weakThis->bufferWasConsumed();
                    });
                }];
            }
        } else
            [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];

    } else {
        // Audio path: hand the sample to the per-track renderer.
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1162
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    DEBUG_LOG(LOGIDENTIFIER);

    // A consumed buffer means the player now has a displayable video frame.
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1170
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    // Video readiness is answered by the decompression session when one
    // exists, otherwise by the display layer.
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            return m_decompressionSession->isReadyForMoreMediaData();
        return [m_displayLayer isReadyForMoreMediaData];
    }

    // Audio readiness comes from the renderer attached to this track, if any.
    if (!m_audioRenderers.contains(trackID))
        return false;
    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1186
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    ALWAYS_LOG(LOGIDENTIFIER, isActive);

    // Propagate the active-state change up to the owning media source.
    if (!m_mediaSource)
        return;

    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1193
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    // Without a client to consult, the requested time is the best answer.
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1200
void SourceBufferPrivateAVFObjC::willSeek()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // Drop queued samples in all renderers; the seek will re-enqueue from the
    // new position.
    flush();
}
1206
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    // Report the last presentation size observed in enqueueSample(), or an
    // empty size if no video sample has been seen yet.
    if (m_cachedSize)
        return m_cachedSize.value();
    return FloatSize();
}
1211
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    INFO_LOG(LOGIDENTIFIER, trackID);

    bool isEnabledVideoTrack = trackID == m_enabledVideoTrackID;

    // Tracks we are not rendering get no notification.
    if (!isEnabledVideoTrack && !m_audioRenderers.contains(trackID))
        return;

    // Cancel the pending readiness callbacks for this track, then tell the
    // client it may provide more samples.
    if (isEnabledVideoTrack) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else
        [m_audioRenderers.get(trackID) stopRequestingMediaData];

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1228
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        // Video: both the decompression session and the display layer can
        // signal readiness; each callback funnels into
        // didBecomeReadyForMoreSamples(). Blocks hold a WeakPtr so a destroyed
        // buffer is not called back.
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            auto weakThis = makeWeakPtr(*this);
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        // Audio: readiness comes from the track's renderer.
        auto weakThis = makeWeakPtr(*this);
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1253
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    ALWAYS_LOG(LOGIDENTIFIER, contentType);

    // Delegate the decision to the media-source player's type support check.
    MediaEngineSupportParameters parameters;
    parameters.type = contentType;
    parameters.isMediaSource = true;
    return MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters) != MediaPlayer::IsNotSupported;
}
1263
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        // Tear down the outgoing layer: drop queued samples, cancel its
        // readiness callback, and stop watching it for errors.
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        auto weakThis = makeWeakPtr(*this);
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            // Read m_enabledVideoTrackID through weakThis instead of directly:
            // a direct member read would implicitly capture raw `this` in the
            // block alongside weakThis. Going through weakThis matches the
            // lambda in setDecompressionSession() and keeps the block's only
            // reference to the object the guarded WeakPtr.
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1290
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Shut down the outgoing session before replacing it.
    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    // Wire up the new session's callbacks (all guarded by WeakPtr), then ask
    // the client to re-enqueue samples for the enabled video track.
    m_decompressionSession->requestMediaDataWhenReady([weakThis = makeWeakPtr(*this)] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = makeWeakPtr(*this)] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1319
1320 #if !RELEASE_LOG_DISABLED
WTFLogChannel& SourceBufferPrivateAVFObjC::logChannel() const
{
    // All logging from this class goes to the MediaSource channel.
    return LogMediaSource;
}
1325 #endif
1326
1327 }
1328
1329 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)