ca7042afd756ceb214989897c911ca1e402879fe
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMFairPlayStreaming.h"
34 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
35 #import "CDMSessionAVContentKeySession.h"
36 #import "CDMSessionMediaSourceAVFObjC.h"
37 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "Logging.h"
39 #import "MediaDescription.h"
40 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
41 #import "MediaSample.h"
42 #import "MediaSampleAVFObjC.h"
43 #import "MediaSourcePrivateAVFObjC.h"
44 #import "NotImplemented.h"
45 #import "SharedBuffer.h"
46 #import "SourceBufferPrivateClient.h"
47 #import "TimeRanges.h"
48 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
49 #import "WebCoreDecompressionSession.h"
50 #import <AVFoundation/AVAssetTrack.h>
51 #import <JavaScriptCore/TypedArrayInlines.h>
52 #import <QuartzCore/CALayer.h>
53 #import <objc/runtime.h>
54 #import <pal/avfoundation/MediaTimeAVFoundation.h>
55 #import <pal/spi/mac/AVFoundationSPI.h>
56 #import <wtf/BlockObjCExceptions.h>
57 #import <wtf/HashCountedSet.h>
58 #import <wtf/MainThread.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WTFSemaphore.h>
61 #import <wtf/WeakPtr.h>
62 #import <wtf/text/AtomicString.h>
63 #import <wtf/text/CString.h>
64
65 #pragma mark - Soft Linking
66
67 #import <pal/cf/CoreMediaSoftLink.h>
68
69 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
70
71 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
72 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
73 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
74 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
75 ALLOW_NEW_API_WITHOUT_GUARDS_END
76 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
77 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
83 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
84
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
86 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
87
88 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
89 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
90 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
91
92 #pragma mark -
93 #pragma mark AVStreamSession
94
// Minimal forward declaration of the AVStreamSession SPI class: only the two
// parser-registration methods this file actually calls are declared here.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
99
100 #pragma mark -
101 #pragma mark WebAVStreamDataParserListener
102
// Receives AVStreamDataParser delegate callbacks (delivered on the background
// parser queue) and forwards them to the owning SourceBufferPrivateAVFObjC on
// the main thread. The parent is a WeakPtr because the listener can outlive
// the source buffer while appends are still in flight on the parser queue.
@interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    // Signaled by SourceBufferPrivateAVFObjC::abort() to unblock the
    // synchronous key-request callbacks below.
    Box<Semaphore> _abortSemaphore;
    AVStreamDataParser* _parser;
}
@property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
@property (assign) Box<Semaphore> abortSemaphore;
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
@end
112
@implementation WebAVStreamDataParserListener
// Designated initializer: registers self as the parser's delegate.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

@synthesize parent=_parent;
@synthesize abortSemaphore=_abortSemaphore;

// This file is compiled without ARC, hence the explicit [super dealloc].
- (void)dealloc
{
    [_parser setDelegate:nil];
    [super dealloc];
}

// Detaches from the parser without waiting for dealloc; called from
// SourceBufferPrivateAVFObjC::destroyParser().
- (void)invalidate
{
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// All delegate callbacks below arrive on the parser queue. Each retains its
// payload and hops to the main thread before touching the (WeakPtr) parent.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

// Variant with a discontinuity flag; the flag is intentionally ignored and the
// asset is handled identically to the method above.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    UNUSED_PARAM(discontinuity);
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<NSError> protectedError = error;
    callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
        if (parent)
            parent->didFailToParseStreamDataWithError(protectedError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<CMSampleBufferRef> protectedSample = sample;
    // The NSString media type is converted to a WTF::String so it can safely
    // cross threads.
    callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
        if (parent)
            parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
        if (parent)
            parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

IGNORE_WARNINGS_BEGIN("deprecated-implementations")
- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
IGNORE_WARNINGS_END
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
        if (parent)
            parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
        respondedSemaphore->signal();
    });

    // Poll both semaphores in 100ms slices: return when the main thread has
    // responded, or when abort() signals _abortSemaphore. The abort semaphore
    // is re-signaled so any other blocked callback also wakes up.
    while (true) {
        if (respondedSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            _abortSemaphore->signal();
            return;
        }
    }
}

IGNORE_WARNINGS_BEGIN("deprecated-implementations")
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
IGNORE_WARNINGS_END
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // Block the parser queue until a CDM session exists (hasSessionSemaphore
    // is signaled by the main thread) or the append is aborted; same
    // two-semaphore polling pattern as above.
    Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
        if (parent)
            parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
    });

    while (true) {
        if (hasSessionSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            _abortSemaphore->signal();
            return;
        }
    }
}
@end
246
// Observes error-related KVO keys and decode-failure notifications on
// AVSampleBufferDisplayLayers and AVSampleBufferAudioRenderers, forwarding
// them to the owning SourceBufferPrivateAVFObjC on the main thread.
@interface WebAVSampleBufferErrorListener : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
    ALLOW_NEW_API_WITHOUT_GUARDS_END
}

- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
ALLOW_NEW_API_WITHOUT_GUARDS_END
@end
264
@implementation WebAVSampleBufferErrorListener

- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = WTFMove(parent);
    return self;
}

// Compiled without ARC: invalidate (to balance KVO registrations) and then
// [super dealloc].
- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Removes every KVO registration and notification observation. Must keep the
// remove calls symmetric with begin/stopObserving* or KVO will assert/throw
// on over- or under-removal. Safe to call more than once.
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

// Starts observing a display layer's "error" and external-protection keys and
// its decode-failure notification. Each layer must be registered only once.
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

// Audio renderers are only observed for "error".
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
ALLOW_NEW_API_WITHOUT_GUARDS_END
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO sink: dispatches by observed object class, then by key path, and
// forwards to the parent on the main thread. NSNull "error" values (the key
// transitioning back to nil) are ignored.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            if ([error isKindOfClass:[NSNull class]])
                return;

            callOnMainThread([parent = _parent, layer = WTFMove(layer), error = WTFMove(error)] {
                if (parent)
                    parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"]) {
            callOnMainThread([parent = _parent, obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
                if (parent)
                    parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
            });
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
        if ([error isKindOfClass:[NSNull class]])
            return;

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualToString:@"error"]);

        callOnMainThread([parent = _parent, renderer = WTFMove(renderer), error = WTFMove(error)] {
            if (parent)
                parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

// Handler for AVSampleBufferDisplayLayerFailedToDecodeNotification; ignores
// layers this listener is not tracking.
- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    if (!_layers.contains(layer.get()))
        return;

    callOnMainThread([parent = _parent, layer = WTFMove(layer), error = retainPtr([[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey])] {
        if (parent)
            parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
405
406 namespace WebCore {
407 using namespace PAL;
408
409 #pragma mark -
410 #pragma mark MediaDescriptionAVFObjC
411
// Lightweight MediaDescription backed by an AVAssetTrack: snapshots the
// track's media characteristics and its four-character codec code at
// construction time so later queries never touch the track again.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static Ref<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(*new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    AtomicString codec() const override { return m_codec; }
    bool isVideo() const override { return m_isVideo; }
    bool isAudio() const override { return m_isAudio; }
    bool isText() const override { return m_isText; }

protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        // Only the first format description is consulted; tracks with no
        // format descriptions leave m_codec null.
        NSArray* descriptions = [track formatDescriptions];
        if (![descriptions count])
            return;

        auto description = (__bridge CMFormatDescriptionRef)[descriptions objectAtIndex:0];
        FourCharCode subtype = CMFormatDescriptionGetMediaSubType(description);
        // Reinterpret the 4-byte code as four Latin-1 characters.
        m_codec = AtomicString(reinterpret_cast<LChar*>(&subtype), 4);
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
441
442 #pragma mark -
443 #pragma mark SourceBufferPrivateAVFObjC
444
// Process-wide registry mapping a per-buffer ID to its source buffer, used by
// bufferWasConsumedCallback() to route CoreMedia notifications back to the
// correct (still-alive) SourceBufferPrivateAVFObjC.
static HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>& sourceBufferMap()
{
    static NeverDestroyed<HashMap<uint64_t, WeakPtr<SourceBufferPrivateAVFObjC>>> registry;
    return registry.get();
}
450
// Returns a fresh non-zero ID for sourceBufferMap(); zero is reserved as the
// "invalid" value checked in bufferWasConsumedCallback().
static uint64_t nextMapID()
{
    static uint64_t currentID;
    return ++currentID;
}
456
// CoreMedia notification callback. The listener token is the buffer's map ID
// (not an object pointer), so a stale notification for a destroyed buffer
// simply misses in sourceBufferMap(). Re-dispatches itself to the main thread
// when invoked on a background thread.
static void bufferWasConsumedCallback(CMNotificationCenterRef, const void* listener, CFStringRef notificationName, const void*, CFTypeRef)
{
    if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
        return;

    if (!isMainThread()) {
        // Recursive trampoline: re-enter this function on the main thread.
        callOnMainThread([notificationName, listener] {
            bufferWasConsumedCallback(nullptr, listener, notificationName, nullptr, nullptr);
        });
        return;
    }

    uint64_t mapID = reinterpret_cast<uint64_t>(listener);
    if (!mapID) {
        RELEASE_LOG(MediaSource, "bufferWasConsumedCallback - ERROR: didn't find ID %llu in map", mapID);
        return;
    }

    // WeakPtr lookup: silently drops notifications for already-destroyed buffers.
    if (auto sourceBuffer = sourceBufferMap().get(mapID).get())
        sourceBuffer->bufferWasConsumed();
}
478
// Factory: the only way to construct a SourceBufferPrivateAVFObjC.
Ref<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    return adoptRef(*new SourceBufferPrivateAVFObjC(parent));
}
483
// Creates the stream data parser, its delegate listener, the error listener,
// and the dispatch group used to join in-flight appends, then registers for
// CoreMedia buffer-consumed notifications keyed by a fresh map ID.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:createWeakPtr()]))
    , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
    , m_mediaSource(parent)
    , m_mapID(nextMapID())
#if !RELEASE_LOG_DISABLED
    , m_logger(parent->logger())
    , m_logIdentifier(parent->nextSourceBufferLogIdentifier())
#endif
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // The listener token is m_mapID, not `this`: the callback then looks the
    // buffer up through sourceBufferMap()'s WeakPtr and can safely ignore
    // notifications that arrive after destruction. The matching removal must
    // use the same token.
    CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), reinterpret_cast<void*>(m_mapID), bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);

    sourceBufferMap().add(m_mapID, makeWeakPtr(*this));
}
503
// Tears down the parser, renderers, and notification listener, and unblocks
// any parser-queue thread still waiting on the session semaphore.
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    ASSERT(!m_client);
    sourceBufferMap().remove(m_mapID);
    destroyParser();
    destroyRenderers();

    // The constructor registered the listener under m_mapID, not `this`; the
    // removal must pass the identical token or CMNotificationCenter never
    // unregisters it (leaking the registration and allowing callbacks with a
    // stale ID). Previously this erroneously passed `this`.
    CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), reinterpret_cast<void*>(m_mapID), bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);

    // A parser-queue callback may still be blocked waiting for a CDM session;
    // signal so it can complete instead of deadlocking.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
}
518
// Main-thread continuation of the parser's "parsed an initialization segment"
// callback: captures the asset, rebuilds the audio/video track lists, and
// forwards an InitializationSegment to the SourceBufferPrivateClient.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!m_mediaSource)
        return;

    // Reject streams whose tracks would need software decode when the player
    // policy requires hardware support; the current append then fails.
    if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
        for (AVAssetTrack *track in [asset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
                m_parsingSucceeded = false;
                return;
            }
        }
    }

    m_asset = asset;

    m_videoTracks.clear();
    m_audioTracks.clear();

    // A fresh initialization segment ends any sample-discard window started by
    // resetParserState().
    m_discardSamplesUntilNextInitializationSegment = false;

    SourceBufferPrivateClient::InitializationSegment segment;

    // Prefer the container-provided duration hint when available; fall back to
    // the computed asset duration if the hint is absent, invalid, or zero.
    if ([m_asset respondsToSelector:@selector(overallDurationHint)])
        segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);

    if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
        segment.duration = PAL::toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
            // FIXME(125161): Handle in-band text tracks.
            continue;
        }

        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            auto videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack.copyRef();
            m_videoTracks.append(WTFMove(videoTrack));
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            auto audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack.copyRef();
            m_audioTracks.append(WTFMove(audioTrack));
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
}
581
// Records a parse failure; appendCompleted() later reports ParsingFailed to
// the client based on m_parsingSucceeded.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    m_parsingSucceeded = false;
}
591
// NOTE(review): this struct is not referenced anywhere in the visible portion
// of this file — it may be dead code left over from an earlier callback-based
// design; verify before removing. The `const String&` member means any
// instance must not outlive the string it was constructed from.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
597
// Main-thread entry for parsed media samples; the flags parameter is unused.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
{
    processCodedFrame(trackID, sampleBuffer, mediaType);
}
602
// Wraps a parsed CMSampleBuffer in a MediaSample and hands it to the client.
// Returns false when the sample was dropped (unknown track, or inside the
// discard window after a parser reset), true otherwise.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    bool isKnownTrack = trackID == m_enabledVideoTrackID || m_audioRenderers.contains(trackID);
    if (!isKnownTrack) {
        // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
        // will just confuse its state. Drop this sample until we can handle text tracks properly.
        return false;
    }

    if (m_discardSamplesUntilNextInitializationSegment)
        return false;

    if (!m_client)
        return true;

    Ref<MediaSample> sample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
    DEBUG_LOG(LOGIDENTIFIER, sample.get());
    m_client->sourceBufferPrivateDidReceiveSample(sample);

    return true;
}
622
// Deliberately unimplemented; end-of-track handling is not needed yet.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
{
    notImplemented();
}
627
// Legacy-EME path: called (synchronously, via the parser-queue semaphore
// handshake in WebAVStreamDataParserListener) before the parser emits a key
// request, so the AVStreamSession can be attached to the parser in time.
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);

    m_protectedTrackID = trackID;

    // Attach the parser to an existing CDM session when there is one;
    // otherwise fall back to the player's stream session (only needed when
    // AVContentKeySession is unavailable).
    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
        session->addParser(m_parser.get());
    else if (!CDMSessionAVContentKeySession::isAvailable()) {
        BEGIN_BLOCK_OBJC_EXCEPTIONS;
        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
        END_BLOCK_OBJC_EXCEPTIONS;
    }
#else
    UNUSED_PARAM(trackID);
#endif
}
651
// Handles encrypted-content init data from the parser. The parser queue is
// blocked on hasSessionSemaphore until a CDM session is attached (or abort()
// fires), so every early return below must have signaled — or stored — the
// semaphore first.
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
{
    if (!m_mediaSource)
        return;

#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, "track = ", trackID);

    // Legacy EME path: surface the init data via sourceBufferKeyNeeded and, if
    // a session already exists, attach the parser and release the queue now.
    m_protectedTrackID = trackID;
    auto initDataArray = Uint8Array::create([initData length]);
    [initData getBytes:initDataArray->data() length:initDataArray->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.ptr());
    if (auto session = m_mediaSource->player()->cdmSession()) {
        session->addParser(m_parser.get());
        hasSessionSemaphore->signal();
        return;
    }
#endif

    // Replace any previously stored semaphore, waking its waiter first so that
    // the older append doesn't deadlock.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
    m_hasSessionSemaphore = hasSessionSemaphore;
    
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Modern EME path: extract 'sinf' key IDs; if the CDM instance already has
    // a session for them, register the parser as a key recipient and release
    // the parser queue. Otherwise report the init data and wait for a key.
    auto initDataBuffer = SharedBuffer::create(initData);
    auto keyIDs = CDMPrivateFairPlayStreaming::extractKeyIDsSinf(initDataBuffer);
    if (!keyIDs)
        return;

    if (m_cdmInstance) {
        if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(keyIDs.value())) {
            [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
            if (m_hasSessionSemaphore) {
                m_hasSessionSemaphore->signal();
                m_hasSessionSemaphore = nullptr;
            }
            m_waitingForKey = false;
            return;
        }
    }

    m_keyIDs = WTFMove(keyIDs.value());
    m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());

    m_waitingForKey = true;
    m_mediaSource->player()->waitingForKeyChanged();
#endif

    // Silence unused-parameter warnings in configurations where both DRM
    // paths above are compiled out.
    UNUSED_PARAM(initData);
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(hasSessionSemaphore);
}
704
// Sets (or clears, with nullptr) the client that receives parsed segments and
// samples; the destructor asserts the client was cleared beforehand.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
709
// Lazily-created concurrent queue shared by all source buffers for
// AVStreamDataParser append work, kept off the main thread.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_queue_t parserQueue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
719
// Copies the incoming bytes into an NSData and appends them to the parser on
// the shared parser queue. m_isAppendingGroup is entered here and left inside
// the dispatched block, so abort() can dispatch_group_wait for all in-flight
// appends to drain.
void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
{
    DEBUG_LOG(LOGIDENTIFIER, "data length = ", data.size());

    // FIXME: Avoid the data copy by wrapping around the Vector<> object.
    RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
    RetainPtr<AVStreamDataParser> parser = m_parser;
    RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;

    m_parsingSucceeded = true;
    dispatch_group_enter(m_isAppendingGroup.get());

    dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
        // After resetParserState(), the first append is flagged as a
        // discontinuity so the parser resynchronizes.
        if (parserStateWasReset)
            [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
        else
            [parser appendStreamData:nsData.get()];

        // Completion is reported on the main thread, where weakThis may be
        // dereferenced safely.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->appendCompleted();
        });
        dispatch_group_leave(isAppendingGroup.get());
    });
    m_parserStateWasReset = false;
}
747
// Main-thread completion of an append: records load progress on success and
// reports the overall outcome to the client.
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    if (m_parsingSucceeded && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (!m_client)
        return;

    auto result = m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed;
    m_client->sourceBufferPrivateAppendComplete(result);
}
756
// Cancels all outstanding append operations, then re-arms the delegate with a
// fresh WeakPtr and abort semaphore so subsequent appends work normally.
void SourceBufferPrivateAVFObjC::abort()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
    // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
    // semaphore, the m_isAppendingGroup wait operation will deadlock.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
    m_delegate.get().abortSemaphore->signal();
    // Block until every in-flight append dispatched in append() has drained.
    dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    // Revoke the append-scoped WeakPtrs so late callbacks from the aborted
    // appends become no-ops, then hand the delegate fresh ones.
    m_appendWeakFactory.revokeAll();
    m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
}
772
// Flags that the next append carries a stream discontinuity (see append())
// and that samples should be dropped until the next initialization segment.
void SourceBufferPrivateAVFObjC::resetParserState()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_parserStateWasReset = true;
    m_discardSamplesUntilNextInitializationSegment = true;
}
780
// Detaches the parser from any DRM machinery (legacy stream session and/or
// modern content key session), invalidates the delegate, and drops both.
void SourceBufferPrivateAVFObjC::destroyParser()
{
#if HAVE(AVSTREAMSESSION) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
#endif
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance) {
        if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs))
            [instanceSession->contentKeySession() removeContentKeyRecipient:m_parser.get()];
    }
#endif

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
798
// Tears down all video and audio output objects. Called from removedFromMediaSource().
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    // Release the video layer and decompression session through their setters so
    // pending media-data requests and observers are torn down in one place.
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    [m_errorListener invalidate];
    m_errorListener = nullptr;

    m_audioRenderers.clear();
}
820
// Full teardown when this source buffer is detached from its MediaSource.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Destroy parsing and rendering state before notifying the media source.
    destroyParser();
    destroyRenderers();

    if (m_mediaSource)
        m_mediaSource->removeSourceBuffer(this);
}
831
// Reports the owning player's ready state; HaveNothing when detached from a media source.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
836
// Forwards a ready-state change to the owning player, if still attached.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    ALWAYS_LOG(LOGIDENTIFIER, readyState);

    if (m_mediaSource)
        m_mediaSource->player()->setReadyState(readyState);
}
844
// True if the client reports at least one video track (regardless of selection).
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    return m_client && m_client->sourceBufferPrivateHasVideo();
}
849
// -1 is the sentinel for "no video track selected"; see trackDidChangeEnabled().
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    return m_enabledVideoTrackID != -1;
}
854
// True if the client reports at least one audio track.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    return m_client && m_client->sourceBufferPrivateHasAudio();
}
859
// Responds to a video track selection change: starts or stops parser output and
// decompression-session requests for the track, and notifies the media source
// that the selected-video state may have changed.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "video trackID = ", trackID, ", selected = ", track->selected());

    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // Deselected the currently-enabled track: stop producing video for it.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // m_mediaSource may be null once this buffer has been removed from its media
    // source; guard the dereference, consistent with every other use in this class.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
885
// Responds to an audio track enable/disable change: detaches or (lazily
// creating it if needed) attaches the track's AVSampleBufferAudioRenderer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    ALWAYS_LOG(LOGIDENTIFIER, "audio trackID = ", trackID, ", selected = ", track->enabled());

    if (!track->enabled()) {
        // Disabled: stop parser output for the track and detach its renderer from
        // the player. The renderer stays in m_audioRenderers for re-enabling.
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        // Enabled: resume parser output, creating the renderer on first use.
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            // weakThis keeps the ready callback from touching a destroyed buffer.
            auto weakThis = createWeakPtr();
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
920
// Legacy EME: switches this buffer to a new CDM session, unblocking the parsing
// queue (if it was waiting for a session) and replaying any deferred HDCP error.
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = makeWeakPtr(session);

    if (m_session) {
        m_session->addSourceBuffer(this);
        // The parsing queue may be blocked waiting on a session; release it.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }

        // An HDCP error recorded before a session existed is delivered now,
        // asynchronously, so the new session can surface it.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
956
// Modern EME: adopts a new FairPlay CDM instance and immediately retries
// decryption, since the instance may already hold the keys we are waiting on.
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (fpsInstance == m_cdmInstance)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_cdmInstance = fpsInstance;
    attemptToDecrypt();
#else
    UNUSED_PARAM(instance);
#endif
}
972
// Attaches the parser as a content-key recipient for the key IDs seen in the
// stream, and unblocks the parsing queue. No-op unless a CDM instance, key IDs,
// and an outstanding waiting-for-key state are all present.
void SourceBufferPrivateAVFObjC::attemptToDecrypt()
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (!m_cdmInstance || m_keyIDs.isEmpty() || !m_waitingForKey)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs);
    if (!instanceSession)
        return;

    // Balanced by removeContentKeyRecipient: in destroyParser().
    [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
    // The parsing queue may be blocked waiting for a session; release it.
    if (m_hasSessionSemaphore) {
        m_hasSessionSemaphore->signal();
        m_hasSessionSemaphore = nullptr;
    }
    m_waitingForKey = false;
#endif
}
991
// Flushes all outputs: the video pipeline first, then each audio renderer.
void SourceBufferPrivateAVFObjC::flush()
{
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
999
// Adds an error client; clients are stored unowned and must unregister before destruction.
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
1005
// Removes a previously-registered error client.
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(m_errorClients.contains(client));
    m_errorClients.remove(m_errorClients.find(client));
}
1011
// Handles an error reported by the display layer. Error clients get first
// refusal; if any of them claims the error, it is not forwarded to the
// SourceBuffer client as a rendering error.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->layerDidReceiveError(layer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;

    // Extract the underlying OSStatus carried in the error's userInfo.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
1031
// Responds to an external-protection (HDCP) state change on the output.
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // With modern EME, the media source (and its CDM instance) owns this state.
    if (m_mediaSource && m_mediaSource->cdmInstance()) {
        m_mediaSource->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    ERROR_LOG(LOGIDENTIFIER, obscured);

    // Legacy path: surface the HDCP failure as a synthetic layer error ('HDCP' code).
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
1048
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
// Handles an error reported by an audio renderer. HDCP errors are remembered so
// a later CDM session can replay them (see setCDMSession()).
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ERROR_LOG(LOGIDENTIFIER, [[error description] UTF8String]);

    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    // Unlike layerDidReceiveError(), nothing follows this guard; the early return
    // is currently a no-op kept for symmetry with the layer path.
    if (anyIgnored)
        return;
}
1068
// Flushes a single track: the video pipeline when it is the enabled video
// track, otherwise the matching audio renderer (if any).
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    DEBUG_LOG(LOGIDENTIFIER, trackID);

    if (trackID == m_enabledVideoTrackID)
        flushVideo();
    else if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1079
// Flushes all pending video: the display layer, the decompression session, the
// cached natural size, and the player's available-frame / size-change state.
void SourceBufferPrivateAVFObjC::flushVideo()
{
    DEBUG_LOG(LOGIDENTIFIER);
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        // Re-arm the has-frame notification so the player learns when a frame
        // becomes available again after the flush.
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Forget the cached natural size; the next enqueued sample re-establishes it.
    m_cachedSize = WTF::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1100
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
// Flushes one audio renderer and marks its samples unavailable on the player.
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    [renderer flush];

    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1110
// Enqueues a parsed media sample to the appropriate output: the decompression
// session and/or display layer for the enabled video track, or the matching
// audio renderer. Samples for unknown tracks, or samples that are not
// CMSampleBuffers, are dropped.
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    DEBUG_LOG(LOGIDENTIFIER, "track ID = ", trackID, ", sample = ", sample.get());

    if (trackID == m_enabledVideoTrackID) {
        // Track presentation-size changes: apply immediately for the first sample,
        // otherwise schedule the change at the sample's presentation time.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            DEBUG_LOG(LOGIDENTIFIER, "size changed to ", formatSize);
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (!m_displayLayer)
            return;

        if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
            DEBUG_LOG(LOGIDENTIFIER, "adding buffer attachment");

            // Enqueue a copy tagged with a consumed-notification attachment so the
            // first displayed frame can be detected (see bufferWasConsumed()).
            // CMSampleBufferCreateCopy can fail; previously its status was ignored
            // and an uninitialized CMSampleBufferRef was adopted and enqueued.
            // Check the status and fall back to enqueueing the original buffer.
            CMSampleBufferRef rawSampleCopy = nullptr;
            if (CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy) == noErr) {
                auto sampleCopy = adoptCF(rawSampleCopy);
                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{ (__bridge NSString *)kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed : @(YES) }, kCMAttachmentMode_ShouldNotPropagate);
                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
            } else
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
#if PLATFORM(IOS_FAMILY)
            // NOTE(review): iOS marks the frame available eagerly instead of
            // waiting for the consumed notification — rationale not visible here.
            m_mediaSource->player()->setHasAvailableVideoFrame(true);
#endif
        } else
            [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];

    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1165
// Marks the first video frame as available. Presumably driven by the
// kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed attachment added in
// enqueueSample(); the notification wiring is outside this chunk — verify there.
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    DEBUG_LOG(LOGIDENTIFIER);

    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1173
// Reports whether the output for the given track can accept more samples.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    // Video: the decompression session takes precedence over the display layer.
    if (trackID == m_enabledVideoTrackID)
        return m_decompressionSession ? m_decompressionSession->isReadyForMoreMediaData() : [m_displayLayer isReadyForMoreMediaData];

    // Audio: ask the renderer for this track, if one exists.
    if (m_audioRenderers.contains(trackID))
        return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];

    return false;
}
1189
// Forwards this buffer's active-state change to the media source.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    ALWAYS_LOG(LOGIDENTIFIER, isActive);
    if (m_mediaSource)
        m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1196
// Asks the client for the best seekable time near |time| within the given
// thresholds; with no client, the requested time stands unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1203
void SourceBufferPrivateAVFObjC::willSeek()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // Drop all enqueued-but-unrendered samples so stale output is not shown post-seek.
    flush();
}
1209
// Returns the last video presentation size seen by enqueueSample(), or an
// empty size before the first sample (and after flushVideo() clears the cache).
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    if (!m_cachedSize)
        return { };
    return *m_cachedSize;
}
1214
// Invoked when a renderer or the decompression session can accept more samples
// for |trackID|: cancels the low-level ready-for-data requests, then notifies
// the SourceBuffer client once.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    INFO_LOG(LOGIDENTIFIER, trackID);

    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else
        return; // Unknown track: nothing to stop, and the client is not notified.

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1231
// Arms a one-shot "ready for more samples" callback on the output(s) for the
// given track; didBecomeReadyForMoreSamples() disarms it and tells the client.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        // Both the decompression session and the display layer may be active;
        // arm whichever exist.
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            // weakThis keeps the block from touching a destroyed buffer.
            auto weakThis = createWeakPtr();
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        auto weakThis = createWeakPtr();
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1256
// True when the media-source AVFoundation player engine supports |contentType|.
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    ALWAYS_LOG(LOGIDENTIFIER, contentType);

    MediaEngineSupportParameters parameters;
    parameters.isMediaSource = true;
    parameters.type = contentType;
    auto support = MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters);
    return support != MediaPlayer::IsNotSupported;
}
1266
// Swaps the AVSampleBufferDisplayLayer used for video output: tears down the
// outgoing layer's requests and error observation, then arms media-data
// requests, error observation, and sample re-enqueueing on the incoming one.
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        auto weakThis = createWeakPtr();
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            // Read m_enabledVideoTrackID through weakThis: the bare member access
            // implicitly captured the raw |this| pointer in this block; routing it
            // through the checked weak pointer makes the lifetime dependency
            // explicit while still reading the current value at callback time.
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1293
// Swaps the WebCoreDecompressionSession used for video decode: invalidates the
// outgoing session, then arms ready-for-data and has-frame callbacks on the
// incoming one and asks the client to re-enqueue pending video samples.
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    // weakThis guards both callbacks against outliving this source buffer.
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1323
#if !RELEASE_LOG_DISABLED
// Routes this object's logging macros to the MediaSource log channel.
WTFLogChannel& SourceBufferPrivateAVFObjC::logChannel() const
{
    return LogMediaSource;
}
#endif
1330
1331 }
1332
1333 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)