a2e07cf6f3217b3adf895c92b0239494294585c1
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMFairPlayStreaming.h"
34 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
35 #import "CDMSessionAVContentKeySession.h"
36 #import "CDMSessionMediaSourceAVFObjC.h"
37 #import "InbandTextTrackPrivateAVFObjC.h"
38 #import "Logging.h"
39 #import "MediaDescription.h"
40 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
41 #import "MediaSample.h"
42 #import "MediaSampleAVFObjC.h"
43 #import "MediaSourcePrivateAVFObjC.h"
44 #import "NotImplemented.h"
45 #import "SharedBuffer.h"
46 #import "SourceBufferPrivateClient.h"
47 #import "TimeRanges.h"
48 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
49 #import "WebCoreDecompressionSession.h"
50 #import <AVFoundation/AVAssetTrack.h>
51 #import <JavaScriptCore/TypedArrayInlines.h>
52 #import <QuartzCore/CALayer.h>
53 #import <objc/runtime.h>
54 #import <pal/avfoundation/MediaTimeAVFoundation.h>
55 #import <pal/spi/mac/AVFoundationSPI.h>
56 #import <wtf/BlockObjCExceptions.h>
57 #import <wtf/HashCountedSet.h>
58 #import <wtf/MainThread.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WTFSemaphore.h>
61 #import <wtf/WeakPtr.h>
62 #import <wtf/text/AtomicString.h>
63 #import <wtf/text/CString.h>
64
65 #pragma mark - Soft Linking
66
67 #import <pal/cf/CoreMediaSoftLink.h>
68
69 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
70
71 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
72 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
73 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
74 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
75 ALLOW_NEW_API_WITHOUT_GUARDS_END
76 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
77 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
83 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
84
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
86 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
87
88 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
89 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
90 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
91
92 #pragma mark -
93 #pragma mark AVStreamSession
94
// Minimal redeclaration of the AVStreamSession SPI (the concrete class is
// soft-linked above); only the parser add/remove methods used in this file
// are declared.
95 @interface AVStreamSession : NSObject
96 - (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
97 - (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
98 @end
99
100 #pragma mark -
101 #pragma mark WebAVStreamDataParserListener
102
// Delegate object registered on an AVStreamDataParser. Holds its owning
// SourceBufferPrivateAVFObjC weakly and forwards parser callbacks to it.
// The `assign` property attribute is used because the property types are C++
// value types (WeakPtr/Box), not Objective-C objects.
103 @interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
104     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
105     Box<Semaphore> _abortSemaphore;
106     AVStreamDataParser* _parser;
107 }
108 @property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
109 @property (assign) Box<Semaphore> abortSemaphore;
110 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
111 @end
112
// Parser callbacks arrive on the parser's background queue; each method below
// retains its payload and bounces it to the main thread, where the weakly-held
// parent (if still alive) consumes it.
113 @implementation WebAVStreamDataParserListener
// Designated initializer: registers self as the parser's delegate. The parent
// back-pointer is weak, so the listener never keeps the source buffer alive.
114 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
115 {
116     self = [super init];
117     if (!self)
118         return nil;
119
120     ASSERT(parent);
121     _parent = parent;
122     _parser = parser;
123     [_parser setDelegate:self];
124     return self;
125 }
126
127 @synthesize parent=_parent;
128 @synthesize abortSemaphore=_abortSemaphore;
129
// Manual retain/release file (note the [super dealloc]); detach from the
// parser before deallocation so it never messages a dead delegate.
130 - (void)dealloc
131 {
132     [_parser setDelegate:nil];
133     [super dealloc];
134 }
135
// Called by the owner on teardown; safe to call before dealloc (messaging the
// then-nil _parser in dealloc is a no-op).
136 - (void)invalidate
137 {
138     [_parser setDelegate:nil];
139     _parser = nullptr;
140 }
141
// The parser produced (or updated) an asset describing the appended stream.
142 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
143 {
144     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
145
146     RetainPtr<AVAsset*> protectedAsset = asset;
147     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
148         if (parent)
149             parent->didParseStreamDataAsAsset(protectedAsset.get());
150     });
151 }
152
// Variant with a discontinuity flag; the flag is deliberately ignored and the
// asset is forwarded exactly as in the method above.
153 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
154 {
155     UNUSED_PARAM(discontinuity);
156     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
157
158     RetainPtr<AVAsset*> protectedAsset = asset;
159     callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
160         if (parent)
161             parent->didParseStreamDataAsAsset(protectedAsset.get());
162     });
163 }
164
// Parsing failed; forwards the NSError to the parent on the main thread.
165 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
166 {
167     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
168
169     RetainPtr<NSError> protectedError = error;
170     callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
171         if (parent)
172             parent->didFailToParseStreamDataWithError(protectedError.get());
173     });
174 }
175
// A coded sample is available for a track; the NSString media type is copied
// into a WTF::String before crossing threads.
176 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
177 {
178     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
179
180     RetainPtr<CMSampleBufferRef> protectedSample = sample;
181     callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
182         if (parent)
183             parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
184     });
185 }
186
187 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
188 {
189     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
190
191     callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
192         if (parent)
193             parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
194     });
195 }
196
197 IGNORE_WARNINGS_BEGIN("deprecated-implementations")
198 - (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
199 IGNORE_WARNINGS_END
200 {
201     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
202
203     // We must call synchronously to the main thread, as the AVStreamSession must be associated
204     // with the streamDataParser before the delegate method returns.
205     Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
206     callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
207         if (parent)
208             parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
209         respondedSemaphore->signal();
210     });
211
    // Poll in 100ms slices, alternating between "main thread responded" and
    // "owner aborted", so SourceBufferPrivateAVFObjC::abort() can unblock this
    // background thread. The abort semaphore is re-signaled on consumption —
    // presumably so any other waiter parked on it also wakes; confirm before
    // changing.
212     while (true) {
213         if (respondedSemaphore->waitFor(100_ms))
214             return;
215
216         if (_abortSemaphore->waitFor(100_ms)) {
217             _abortSemaphore->signal();
218             return;
219         }
220     }
221 }
222
223 IGNORE_WARNINGS_BEGIN("deprecated-implementations")
224 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
225 IGNORE_WARNINGS_END
226 {
227     ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);
228
    // Blocks this parsing thread until the main thread has a key session in
    // place (or until abort), using the same 100ms polling scheme as above.
229     Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
230     callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
231         if (parent)
232             parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
233     });
234
235     while (true) {
236         if (hasSessionSemaphore->waitFor(100_ms))
237             return;
238
239         if (_abortSemaphore->waitFor(100_ms)) {
240             _abortSemaphore->signal();
241             return;
242         }
243     }
244 }
245 @end
246
// Observes "error" KVO notifications (and decode-failure NSNotifications) on
// AVSampleBufferDisplayLayers and AVSampleBufferAudioRenderers, relaying them
// to the weakly-held SourceBufferPrivateAVFObjC on the main thread.
247 @interface WebAVSampleBufferErrorListener : NSObject {
248     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
249     Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
250     ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
251     Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
252     ALLOW_NEW_API_WITHOUT_GUARDS_END
253 }
254
255 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent;
256 - (void)invalidate;
257 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
258 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
259 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
260 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
261 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
262 ALLOW_NEW_API_WITHOUT_GUARDS_END
263 @end
264
265 @implementation WebAVSampleBufferErrorListener
266
267 - (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>&&)parent
268 {
269     if (!(self = [super init]))
270         return nil;
271
272     _parent = WTFMove(parent);
273     return self;
274 }
275
// Manual retain/release file; invalidate tears down all KVO registrations
// before deallocation (removing an observer twice would throw).
276 - (void)dealloc
277 {
278     [self invalidate];
279     [super dealloc];
280 }
281
// Idempotent teardown: removes every KVO and notification-center registration
// this listener added, then drops the parent pointer.
282 - (void)invalidate
283 {
284     if (!_parent && !_layers.size() && !_renderers.size())
285         return;
286
287     for (auto& layer : _layers) {
288         [layer removeObserver:self forKeyPath:@"error"];
289         [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
290     }
291     _layers.clear();
292
293     for (auto& renderer : _renderers)
294         [renderer removeObserver:self forKeyPath:@"error"];
295     _renderers.clear();
296
297     [[NSNotificationCenter defaultCenter] removeObserver:self];
298
299     _parent = nullptr;
300 }
301
// Observes the layer's "error" and external-protection key paths plus the
// soft-linked failed-to-decode notification. Must be paired with
// stopObservingLayer: (asserted, not enforced, in release builds).
302 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
303 {
304     ASSERT(_parent);
305     ASSERT(!_layers.contains(layer));
306
307     _layers.append(layer);
308     [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
309     [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
310     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
311 }
312
313 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
314 {
315     ASSERT(_parent);
316     ASSERT(_layers.contains(layer));
317
318     [layer removeObserver:self forKeyPath:@"error"];
319     [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
320     _layers.remove(_layers.find(layer));
321
322     [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
323 }
324
// Audio renderers only expose an "error" key path (no decode notification).
325 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
326 - (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
327 {
328 ALLOW_NEW_API_WITHOUT_GUARDS_END
329     ASSERT(_parent);
330     ASSERT(!_renderers.contains(renderer));
331
332     _renderers.append(renderer);
333     [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
334 }
335
336 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
337 - (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
338 ALLOW_NEW_API_WITHOUT_GUARDS_END
339 {
340     ASSERT(_parent);
341     ASSERT(_renderers.contains(renderer));
342
343     [renderer removeObserver:self forKeyPath:@"error"];
344     _renderers.remove(_renderers.find(renderer));
345 }
346
// Single KVO entry point: dispatches on the observed object's (soft-linked)
// class, then on the key path, and relays to the parent on the main thread.
// NOTE(review): unhandled object/key-path combinations hit ASSERT_NOT_REACHED
// rather than calling super's observeValueForKeyPath: — intentional here since
// this listener registers every observation itself.
347 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
348 {
349     UNUSED_PARAM(context);
350     UNUSED_PARAM(keyPath);
351     ASSERT(_parent);
352
353     if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
354         RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
355         ASSERT(_layers.contains(layer.get()));
356
357         if ([keyPath isEqualTo:@"error"]) {
358             RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
359             callOnMainThread([parent = _parent, layer = WTFMove(layer), error = WTFMove(error)] {
360                 if (parent)
361                     parent->layerDidReceiveError(layer.get(), error.get());
362             });
363         } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
364             callOnMainThread([parent = _parent, obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
365                 if (parent)
366                     parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
367             });
368         } else
369             ASSERT_NOT_REACHED();
370
371     } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
372         ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
373         RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
374         ALLOW_NEW_API_WITHOUT_GUARDS_END
375         RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
376
377         ASSERT(_renderers.contains(renderer.get()));
378         ASSERT([keyPath isEqualTo:@"error"]);
379
380         callOnMainThread([parent = _parent, renderer = WTFMove(renderer), error = WTFMove(error)] {
381             if (parent)
382                 parent->rendererDidReceiveError(renderer.get(), error.get());
383         });
384     } else
385         ASSERT_NOT_REACHED();
386 }
387
// Handler for the failed-to-decode notification; ignores layers this listener
// is not tracking, then forwards the userInfo error to the parent.
388 - (void)layerFailedToDecode:(NSNotification*)note
389 {
390     RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
391     if (!_layers.contains(layer.get()))
392         return;
393
394     callOnMainThread([parent = _parent, layer = WTFMove(layer), error = retainPtr([[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey])] {
395         if (parent)
396             parent->layerDidReceiveError(layer.get(), error.get());
397     });
398 }
399 @end
400
401 #pragma mark -
402
// Tiny wrapper carrying a weak SourceBufferPrivateAVFObjC pointer through the
// CoreMedia "buffer consumed" notification payload dictionary.
403 @interface WebBufferConsumedContext : NSObject {
404     WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
405 }
406 @property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
407 @end
408
@implementation WebBufferConsumedContext

// Stores a weak reference to the owning source buffer so it can be recovered
// when the CoreMedia notification payload is delivered.
- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

@dynamic parent;

// Resolves the weak pointer; returns nullptr once the owner has been destroyed.
- (WebCore::SourceBufferPrivateAVFObjC*)parent
{
    return _parent.get();
}
@end
424
425 namespace WebCore {
426 using namespace PAL;
427
428 #pragma mark -
429 #pragma mark MediaDescriptionAVFObjC
430
// MediaDescription backed by an AVAssetTrack: caches the track's audio/video/
// text characteristics and its codec four-character code at construction time.
431 class MediaDescriptionAVFObjC final : public MediaDescription {
432 public:
433     static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
434     virtual ~MediaDescriptionAVFObjC() { }
435
436     AtomicString codec() const override { return m_codec; }
437     bool isVideo() const override { return m_isVideo; }
438     bool isAudio() const override { return m_isAudio; }
439     bool isText() const override { return m_isText; }
440     
441 protected:
442     MediaDescriptionAVFObjC(AVAssetTrack* track)
443         : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
444         , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
445         , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
446     {
        // Codec string is taken from the first format description only.
        // NOTE(review): the FourCharCode's bytes are read in host memory order;
        // presumably this yields the intended character order on the supported
        // platforms — confirm before reusing on a different endianness.
447         NSArray* formatDescriptions = [track formatDescriptions];
448         CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
449         if (description) {
450             FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
451             m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
452         }
453     }
454
455     AtomicString m_codec;
456     bool m_isVideo;
457     bool m_isAudio;
458     bool m_isText;
459 };
460
461 #pragma mark -
462 #pragma mark SourceBufferPrivateAVFObjC
463
// Dictionary key under which a WebBufferConsumedContext rides in the CoreMedia
// notification payload.
464 static NSString *kBufferConsumedContext = @"BufferConsumedContext";
465
// CMNotificationCenter callback for kCMSampleBufferConsumerNotification_BufferConsumed.
// May fire on any thread; re-dispatches itself to the main thread before
// touching the source buffer.
// NOTE(review): notificationName is captured by the lambda without retaining;
// it compares equal to a CoreMedia constant here, so it is presumably a static
// CFString — confirm if new notification names are ever routed through this.
466 static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
467 {
468     if (!isMainThread()) {
469         callOnMainThread([notificationName, payload = retainPtr(payload)] {
470             bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
471         });
472         return;
473     }
474
475     if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
476         return;
477
478     ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
479     WebBufferConsumedContext *context = [(__bridge NSDictionary *)payload valueForKey:kBufferConsumedContext];
480     if (!context)
481         return;
482
    // context.parent resolves the weak pointer; skip if the buffer is gone.
483     if (auto sourceBuffer = context.parent)
484         sourceBuffer->bufferWasConsumed();
485 }
486
487 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
488 {
489     return adoptRef(new SourceBufferPrivateAVFObjC(parent));
490 }
491
// Creates the soft-linked AVStreamDataParser plus the two helper listeners,
// then registers for CoreMedia's buffer-consumed notification. The delegate's
// abort semaphore is installed here (not in the listener's init).
492 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
493     : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
494     , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
495     , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:createWeakPtr()]))
496     , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
497     , m_mediaSource(parent)
498 {
499     CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
500     m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
501 }
502
// Tears down parser and renderers, unregisters the CoreMedia listener, and
// releases any parsing thread still blocked on the key-session semaphore so
// it cannot wait on a destroyed owner.
503 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
504 {
505     ASSERT(!m_client);
506     destroyParser();
507     destroyRenderers();
508
509     CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);
510
511     if (m_hasSessionSemaphore)
512         m_hasSessionSemaphore->signal();
513 }
514
// Main-thread handler for a parsed initialization segment: validates hardware
// decode requirements, rebuilds the track lists, and reports the resulting
// InitializationSegment to the client.
515 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
516 {
517     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
518
519     if (!m_mediaSource)
520         return;
521
    // Reject the whole segment if any track fails the hardware-decode policy;
    // the append is then reported as a parsing failure via m_parsingSucceeded.
522     if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
523         for (AVAssetTrack *track in [asset tracks]) {
524             if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
525                 m_parsingSucceeded = false;
526                 return;
527             }
528         }
529     }
530
531     m_asset = asset;
532
533     m_videoTracks.clear();
534     m_audioTracks.clear();
535
    // A fresh initialization segment ends any sample-discard window started by
    // resetParserState().
536     m_discardSamplesUntilNextInitializationSegment = false;
537
538     SourceBufferPrivateClient::InitializationSegment segment;
539
    // Prefer the (optional) overallDurationHint; fall back to the asset's
    // duration when the hint is absent, invalid, or zero.
540     if ([m_asset respondsToSelector:@selector(overallDurationHint)])
541         segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);
542
543     if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
544         segment.duration = PAL::toMediaTime([m_asset duration]);
545
546     for (AVAssetTrack* track in [m_asset tracks]) {
547         if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
548             // FIXME(125161): Handle in-band text tracks.
549             continue;
550         }
551
552         if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
553             SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
554             RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
555             info.track = videoTrack;
556             m_videoTracks.append(videoTrack);
557             info.description = MediaDescriptionAVFObjC::create(track);
558             segment.videoTracks.append(info);
559         } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
560             SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
561             RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
562             info.track = audioTrack;
563             m_audioTracks.append(audioTrack);
564             info.description = MediaDescriptionAVFObjC::create(track);
565             segment.audioTracks.append(info);
566         }
567
568         // FIXME(125161): Add TextTrack support
569     }
570
571     if (m_mediaSource)
572         m_mediaSource->player()->characteristicsChanged();
573
574     if (m_client)
575         m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
576 }
577
// Records a parse failure; appendCompleted() later reports ParsingFailed to
// the client based on this flag.
578 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
579 {
580 #if LOG_DISABLED
581     UNUSED_PARAM(error);
582 #endif
583     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
584
585     m_parsingSucceeded = false;
586 }
587
// NOTE(review): this struct appears unused in the visible portion of this
// file; the `const String&` member would dangle if an instance outlived the
// referenced string. TODO: confirm there are no remaining users and remove.
588 struct ProcessCodedFrameInfo {
589     SourceBufferPrivateAVFObjC* sourceBuffer;
590     int trackID;
591     const String& mediaType;
592 };
593
// Main-thread entry for a parsed sample; the parser's flags argument is
// currently unused. Delegates all work to processCodedFrame().
594 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
595 {
596     processCodedFrame(trackID, sampleBuffer, mediaType);
597 }
598
599 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
600 {
601     if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
602         // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
603         // will just confuse its state. Drop this sample until we can handle text tracks properly.
604         return false;
605     }
606
607     if (m_discardSamplesUntilNextInitializationSegment)
608         return false;
609
610     if (m_client) {
611         Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
612         LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
613         m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
614     }
615
616     return true;
617 }
618
// End-of-track handling is intentionally unimplemented; notImplemented() logs
// in debug builds.
619 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
620 {
621     notImplemented();
622 }
623
// Legacy-EME hook, called (synchronously, via the delegate's semaphore dance)
// before the parser emits key-request init data: associates the parser with
// the current CDM session or, failing that, with the player's AVStreamSession.
624 void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
625 {
626     if (!m_mediaSource)
627         return;
628
629     ASSERT(m_parser);
630
631 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
632     LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
633     m_protectedTrackID = trackID;
634
635     if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
636         session->addParser(m_parser.get());
637     else if (!CDMSessionAVContentKeySession::isAvailable()) {
        // -addStreamDataParser: can throw; swallow ObjC exceptions here.
638         BEGIN_BLOCK_OBJC_EXCEPTIONS;
639         [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
640         END_BLOCK_OBJC_EXCEPTIONS;
641     }
642 #else
643     UNUSED_PARAM(trackID);
644 #endif
645 }
646
// Handles encrypted-content init data on the main thread. The parsing thread
// is blocked on hasSessionSemaphore until a key session is attached (or the
// append is aborted), so every early return below must be matched by a signal
// somewhere — either here, in abort(), or in the destructor.
647 void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
648 {
649     if (!m_mediaSource)
650         return;
651
652 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
653     LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
654     m_protectedTrackID = trackID;
655     auto initDataArray = Uint8Array::create([initData length]);
656     [initData getBytes:initDataArray->data() length:initDataArray->length()];
657     m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.ptr());
658     if (auto session = m_mediaSource->player()->cdmSession()) {
659         session->addParser(m_parser.get());
660         hasSessionSemaphore->signal();
661     } else {
        // No session yet: stash the semaphore so it can be signaled later;
        // release any previously stashed waiter first.
662         if (m_hasSessionSemaphore)
663             m_hasSessionSemaphore->signal();
664         m_hasSessionSemaphore = hasSessionSemaphore;
665     }
666 #endif
667
668 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
669     // Modern EME path: extract the FairPlay key IDs carried in the 'sinf' box.
670     auto initDataBuffer = SharedBuffer::create(initData);
671     auto keyIDs = CDMPrivateFairPlayStreaming::extractKeyIDsSinf(initDataBuffer);
672     if (!keyIDs)
673         return;
674
    // If a CDM instance already has a session for these key IDs, attach the
    // parser as a key recipient and release the blocked parsing thread.
675     if (m_cdmInstance) {
676         if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(keyIDs.value())) {
677             [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
678             if (m_hasSessionSemaphore) {
679                 m_hasSessionSemaphore->signal();
680                 m_hasSessionSemaphore = nullptr;
681             }
682             m_waitingForKey = false;
683             return;
684         }
685     }
686
    // Otherwise remember the key IDs and surface the init data to the page.
687     m_keyIDs = WTFMove(keyIDs.value());
688     m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
689
690     m_waitingForKey = true;
691     m_mediaSource->player()->waitingForKeyChanged();
692 #endif
693
694     UNUSED_PARAM(initData);
695     UNUSED_PARAM(trackID);
696     UNUSED_PARAM(hasSessionSemaphore);
697 }
698
// Registers (or clears, with nullptr) the client that receives initialization
// segments, samples, and append-completion callbacks.
699 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
700 {
701     m_client = client;
702 }
703
// Shared concurrent queue on which all AVStreamDataParser appends run.
// Lazily created exactly once, in a thread-safe manner, and never destroyed.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        queue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return queue;
}
713
// Queues the appended bytes for parsing on the shared background queue. The
// dispatch group brackets the in-flight parse so abort() can wait for it; the
// completion hop back to the main thread uses a revocable weak pointer so an
// abort() can suppress stale completions.
// NOTE(review): the LOG format uses %d for data.size() (a size_t) — harmless
// for logging but technically a format mismatch; confirm against LOG's
// expectations if this is ever changed.
714 void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
715 {
716     LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data.data(), data.size());
717
718     // FIXME: Avoid the data copy by wrapping around the Vector<> object.
719     RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
720     WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
721     RetainPtr<AVStreamDataParser> parser = m_parser;
722     RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
723
724     m_parsingSucceeded = true;
725     dispatch_group_enter(m_isAppendingGroup.get());
726
    // A reset parser state (resetParserState()) turns the first subsequent
    // append into an explicit stream discontinuity.
727     dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
728         if (parserStateWasReset)
729             [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
730         else
731             [parser appendStreamData:nsData.get()];
732
733         callOnMainThread([weakThis] {
734             if (weakThis)
735                 weakThis->appendCompleted();
736         });
737         dispatch_group_leave(isAppendingGroup.get());
738     });
739     m_parserStateWasReset = false;
740 }
741
742 void SourceBufferPrivateAVFObjC::appendCompleted()
743 {
744     if (m_parsingSucceeded && m_mediaSource)
745         m_mediaSource->player()->setLoadingProgresssed(true);
746
747     if (m_client)
748         m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
749 }
750
// Cancels all in-flight append work: releases any parsing thread blocked on a
// key session, signals the delegate's abort semaphore, then waits for the
// append group to drain. Revoking the weak pointers suppresses completions of
// the aborted appends; fresh weak pointer and abort semaphore are installed
// for the next append cycle.
751 void SourceBufferPrivateAVFObjC::abort()
752 {
753     // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
754     // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
755     // semaphore, the m_isAppendingGroup wait operation will deadlock.
756     if (m_hasSessionSemaphore)
757         m_hasSessionSemaphore->signal();
758     m_delegate.get().abortSemaphore->signal();
759     dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
760     m_appendWeakFactory.revokeAll();
761     m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
762     m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
763 }
764
765 void SourceBufferPrivateAVFObjC::resetParserState()
766 {
767     m_parserStateWasReset = true;
768     m_discardSamplesUntilNextInitializationSegment = true;
769 }
770
// Detaches the parser from any DRM machinery (stream session or content key
// session) before invalidating the delegate and dropping both references.
771 void SourceBufferPrivateAVFObjC::destroyParser()
772 {
773 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
774     if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
775         [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
776 #endif
777 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
778     if (m_cdmInstance) {
779         if (auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs))
780             [instanceSession->contentKeySession() removeContentKeyRecipient:m_parser.get()];
781     }
782 #endif
783
784     [m_delegate invalidate];
785     m_delegate = nullptr;
786     m_parser = nullptr;
787 }
788
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    // Tear down all output paths: the display layer, the decompression
    // session, and every audio renderer — detaching each from the player and
    // the error listener along the way.
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    // Invalidate only after all renderers stopped being observed.
    [m_errorListener invalidate];
    m_errorListener = nullptr;

    m_audioRenderers.clear();
}
810
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    // Tear down parsing and rendering machinery before detaching from the
    // owning media source.
    destroyParser();
    destroyRenderers();

    if (!m_mediaSource)
        return;
    m_mediaSource->removeSourceBuffer(this);
}
819
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    // Without a media source there is no player to ask, so report the most
    // conservative state.
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
824
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    // Forward to the player while still attached to a media source.
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setReadyState(readyState);
}
830
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    // Delegates to the client; no client means no tracks have been reported.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasVideo();
}
835
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    // -1 is the sentinel for "no enabled video track" (see trackDidChangeEnabled).
    return m_enabledVideoTrackID != -1;
}
840
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    // Delegates to the client; no client means no tracks have been reported.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasAudio();
}
845
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    // Responds to a video track being selected or deselected: tells the parser
    // whether to deliver media data for the track, and wires up (or tears
    // down) the decompression session's ready-for-data callback.
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // FIX: guard against a null media source before dereferencing; every
    // other method in this class null-checks m_mediaSource, which is cleared
    // when this buffer is removed from its media source.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
868
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    // Responds to an audio track being enabled or disabled: toggles parser
    // delivery for the track and attaches/detaches the matching
    // AVSampleBufferAudioRenderer, creating one lazily on first enable.
    int trackID = track->trackID();

    if (!track->enabled()) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            // First enable for this track: create a renderer and have it call
            // back on the main queue whenever it can accept more data. The
            // block captures a weak pointer since the renderer may outlive us.
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            auto weakThis = createWeakPtr();
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
901
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Legacy-EME path: move this buffer between CDM sessions, unblock a
    // parser queue waiting on a stream session, and replay any HDCP error
    // that arrived before a session existed.
    if (session == m_session)
        return;

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = session;

    if (m_session) {
        m_session->addSourceBuffer(this);
        // The parser queue may be blocked in append() waiting for a session;
        // now that one is attached, release it.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }

        // An HDCP error received before a session existed was stashed in
        // m_hdcpError (see rendererDidReceiveError); deliver it now.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
935
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Swap in the FairPlay Streaming CDM instance, then immediately retry
    // decryption in case the needed keys are already available.
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (m_cdmInstance == fpsInstance)
        return;

    m_cdmInstance = fpsInstance;
    attemptToDecrypt();
#else
    UNUSED_PARAM(instance);
#endif
}
949
void SourceBufferPrivateAVFObjC::attemptToDecrypt()
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Modern-EME path: once a CDM instance, the required key IDs, and a
    // pending waiting-for-key state all line up, hook the parser into the
    // content key session and unblock the parser queue.
    if (!m_cdmInstance || m_keyIDs.isEmpty() || !m_waitingForKey)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(m_keyIDs);
    if (!instanceSession)
        return;

    [instanceSession->contentKeySession() addContentKeyRecipient:m_parser.get()];
    // The parser queue may be blocked in append() waiting for a session.
    if (m_hasSessionSemaphore) {
        m_hasSessionSemaphore->signal();
        m_hasSessionSemaphore = nullptr;
    }
    m_waitingForKey = false;
#endif
}
968
void SourceBufferPrivateAVFObjC::flush()
{
    // Flush every output path: the video pipeline plus all audio renderers.
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
976
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // Each client may register at most once.
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
982
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // The client must currently be registered.
    ASSERT(m_errorClients.contains(client));
    size_t position = m_errorClients.find(client);
    m_errorClients.remove(position);
}
988
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Give every registered error client a chance to swallow the error; if
    // any one of them does, do not forward it to the SourceBuffer client.
    bool errorWasIgnored = false;
    for (auto& errorClient : m_errorClients) {
        bool ignoreThisError = false;
        errorClient->layerDidReceiveError(layer, error, ignoreThisError);
        if (ignoreThisError)
            errorWasIgnored = true;
    }
    if (errorWasIgnored)
        return;

    // The underlying OSStatus is stashed in the error's userInfo dictionary.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
1008
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // When a modern-EME CDM instance is attached, let the media source handle
    // the obscured-output notification itself.
    // FIX: null-check m_mediaSource before dereferencing; it can be cleared
    // (this buffer removed from its source) and every other method in this
    // class guards the same way.
    if (m_mediaSource && m_mediaSource->cdmInstance()) {
        m_mediaSource->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    // Legacy path: funnel through the layer-error machinery with a synthetic
    // 'HDCP' error so registered clients can decide whether to ignore it.
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
1023
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);

    // Stash HDCP errors so they can be re-delivered once a CDM session is
    // attached later (see setCDMSession()).
    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Let any registered error client swallow the error.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;
    // NOTE(review): unlike layerDidReceiveError(), nothing is forwarded to
    // m_client after the ignore check, so the return above is currently a
    // no-op — kept as-is pending the FIXME.
}
1043
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    // Flush a single track: the video pipeline when it is the enabled video
    // track, otherwise the matching audio renderer (if any).
    int trackID = trackIDString.toInt();
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);

    if (trackID == m_enabledVideoTrackID)
        flushVideo();
    else if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1054
void SourceBufferPrivateAVFObjC::flushVideo()
{
    // Drops all enqueued-but-undisplayed video from the display layer and the
    // decompression session; the next decoded frame re-arms the player's
    // "has available video frame" state via the callback below.
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Invalidate the cached natural size so the next sample re-reports it.
    m_cachedSize = WTF::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1074
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Discard any enqueued-but-unrendered audio and record that the renderer
    // no longer holds a decodable sample.
    [renderer flush];

    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1084
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    // Pushes a parsed sample into the appropriate output: the display layer
    // and/or decompression session for the enabled video track, or the audio
    // renderer registered for the track.
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes from the sample's format description.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            // FIX: the format string consumes three arguments (%p, %lf, %lf)
            // but `this` was missing from the argument list, so the logged
            // width/height read garbage varargs.
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                // The first size is applied immediately; later changes are
                // scheduled at the sample's presentation time so they line up
                // with display.
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (m_displayLayer) {
            if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
                // Attach a consumed-notification context to a copy of the
                // sample so bufferWasConsumed() fires once it is displayed.
                auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
                CMSampleBufferRef rawSampleCopy;
                CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
                auto sampleCopy = adoptCF(rawSampleCopy);
                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
            } else
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        }
    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1133
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    // Invoked when a display-layer sample tagged with
    // kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed is consumed
    // (see enqueueSample()).
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1139
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    if (trackID == m_enabledVideoTrackID) {
        // When a decompression session exists it takes precedence over the
        // display layer, mirroring enqueueSample().
        if (m_decompressionSession)
            return m_decompressionSession->isReadyForMoreMediaData();
        return [m_displayLayer isReadyForMoreMediaData];
    }

    if (!m_audioRenderers.contains(trackID))
        return false;

    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1155
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    // Relay the active state to the owning media source, if still attached.
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1161
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    // Without a client, the requested time is the best available answer.
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1168
void SourceBufferPrivateAVFObjC::willSeek()
{
    // Seeking invalidates everything already enqueued downstream.
    flush();
}
1173
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    // Size cached from the most recent video format description, or empty if
    // no video sample has been enqueued yet (see enqueueSample()).
    if (!m_cachedSize)
        return FloatSize();
    return m_cachedSize.value();
}
1178
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);

    // Cancel the track's ready-for-data callbacks, then tell the client it
    // may enqueue again. Unknown tracks are ignored entirely.
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else
        return;

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1194
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    // Arms ready-for-more-data callbacks for the given track; each callback
    // funnels into didBecomeReadyForMoreSamples(), which notifies m_client.
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            // Capture a weak pointer: the layer (and its block) may outlive
            // this object.
            auto weakThis = createWeakPtr();
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        auto weakThis = createWeakPtr();
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1219
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    // A type is switchable iff the media-source player engine reports any
    // level of support for it.
    MediaEngineSupportParameters parameters;
    parameters.isMediaSource = true;
    parameters.type = contentType;
    auto support = MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters);
    return support != MediaPlayer::IsNotSupported;
}
1227
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    // Swaps the AVSampleBufferDisplayLayer used for video output. The old
    // layer is flushed and detached from the error listener; the new one is
    // armed with a ready-for-data callback, observed for errors, and the
    // client is asked to re-enqueue samples for the enabled video track.
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        // Weak capture: the layer's block may run after this object is gone.
        auto weakThis = createWeakPtr();
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1254
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    // Swaps the decompression session used for layerless video decode. The
    // old session is stopped and invalidated; the new one is armed with
    // ready-for-data and has-frame callbacks, and the client is asked to
    // re-enqueue samples for the enabled video track.
    if (m_decompressionSession == decompressionSession)
        return;

    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    // Weak captures: the session's callbacks may run after this object is gone.
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1282
1283 }
1284
1285 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)