[WTF] Use Semaphore and BinarySemaphore instead of dispatch_semaphore_t
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
34 #import "CDMSessionAVContentKeySession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "InbandTextTrackPrivateAVFObjC.h"
37 #import "Logging.h"
38 #import "MediaDescription.h"
39 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
40 #import "MediaSample.h"
41 #import "MediaSampleAVFObjC.h"
42 #import "MediaSourcePrivateAVFObjC.h"
43 #import "NotImplemented.h"
44 #import "SharedBuffer.h"
45 #import "SourceBufferPrivateClient.h"
46 #import "TimeRanges.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "WebCoreDecompressionSession.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <JavaScriptCore/TypedArrayInlines.h>
51 #import <QuartzCore/CALayer.h>
52 #import <objc/runtime.h>
53 #import <pal/avfoundation/MediaTimeAVFoundation.h>
54 #import <pal/spi/mac/AVFoundationSPI.h>
55 #import <wtf/BlockObjCExceptions.h>
56 #import <wtf/HashCountedSet.h>
57 #import <wtf/MainThread.h>
58 #import <wtf/Semaphore.h>
59 #import <wtf/SoftLinking.h>
60 #import <wtf/WeakPtr.h>
61 #import <wtf/text/AtomicString.h>
62 #import <wtf/text/CString.h>
63
64 #pragma mark - Soft Linking
65
66 #import <pal/cf/CoreMediaSoftLink.h>
67
68 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
69
70 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
71 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
72 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
73 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
74 ALLOW_NEW_API_WITHOUT_GUARDS_END
75 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
76 SOFT_LINK_CLASS(AVFoundation, AVStreamSession)
77
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
82 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
83
84 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
85 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
86
87 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
88 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
89 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
90
91 #pragma mark -
92 #pragma mark AVStreamSession
93
// Forward declaration of the AVStreamSession SPI; only the two
// parser-attachment methods this file actually calls are declared.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
98
99 #pragma mark -
100 #pragma mark WebAVStreamDataParserListener
101
// Receives AVStreamDataParser delegate callbacks (delivered on the parser's
// background queue) and forwards them to the owning SourceBufferPrivateAVFObjC
// on the main thread via the WeakPtr parent. The abortSemaphore is installed
// by SourceBufferPrivateAVFObjC and lets abort() unblock the synchronous
// content-key waits performed by this listener.
@interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    Box<Semaphore> _abortSemaphore;
    AVStreamDataParser* _parser;
}
@property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
@property (assign) Box<Semaphore> abortSemaphore;
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
@end
111
@implementation WebAVStreamDataParserListener
// Designated initializer; registers self as the parser's delegate. Note that
// _abortSemaphore is not set here — the owning SourceBufferPrivateAVFObjC
// assigns the abortSemaphore property right after construction.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

@synthesize parent=_parent;
@synthesize abortSemaphore=_abortSemaphore;

- (void)dealloc
{
    // Detach from the parser so it cannot message a deallocated delegate.
    [_parser setDelegate:nil];
    [super dealloc];
}

- (void)invalidate
{
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// Each delegate callback below arrives on the background parsing queue; it
// retains its payload and forwards the work to the main thread, where the
// WeakPtr parent is checked before use.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset*> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    // Discontinuities are not handled specially; this forwards identically to
    // the no-discontinuity variant above.
    UNUSED_PARAM(discontinuity);
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset*> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<NSError> protectedError = error;
    callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
        if (parent)
            parent->didFailToParseStreamDataWithError(protectedError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // The NSString media type is converted to a WTF::String before crossing
    // threads so no ObjC object is shared with the main thread.
    RetainPtr<CMSampleBufferRef> protectedSample = sample;
    callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
        if (parent)
            parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
        if (parent)
            parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    Box<BinarySemaphore> respondedSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
        if (parent)
            parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
        respondedSemaphore->signal();
    });

    // Poll both semaphores so an abort (signalled from the main thread in
    // SourceBufferPrivateAVFObjC::abort()) can unblock this wait. The abort
    // semaphore is re-signalled so any other waiter also observes the abort.
    while (true) {
        if (respondedSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            _abortSemaphore->signal();
            return;
        }
    }
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // Block the parsing queue until the main thread either attaches a CDM
    // session (signalling hasSessionSemaphore) or an abort is requested.
    Box<BinarySemaphore> hasSessionSemaphore = Box<BinarySemaphore>::create();
    callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
        if (parent)
            parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
    });

    while (true) {
        if (hasSessionSemaphore->waitFor(100_ms))
            return;

        if (_abortSemaphore->waitFor(100_ms)) {
            _abortSemaphore->signal();
            return;
        }
    }
}
@end
241
// Observes "error" KVO notifications (and, for layers, the
// outputObscuredDueToInsufficientExternalProtection key and the
// failed-to-decode notification) on the display layers and audio renderers
// owned by a SourceBufferPrivateAVFObjC, forwarding failures to the parent on
// the main thread.
// NOTE(review): _parent is a raw pointer; presumably -invalidate is called
// before the parent is destroyed — confirm against the owner's teardown path.
@interface WebAVSampleBufferErrorListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
    ALLOW_NEW_API_WITHOUT_GUARDS_END
}

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
ALLOW_NEW_API_WITHOUT_GUARDS_END
@end
259
@implementation WebAVSampleBufferErrorListener

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Tears down every KVO registration and notification observation and drops
// the parent pointer. Early-returns when already invalidated, so it is safe
// to call more than once (dealloc always calls it).
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

// Starts KVO on the layer's error / obscured-output keys and subscribes to
// its failed-to-decode notification. Must be balanced by -stopObservingLayer:.
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
// Starts KVO on the renderer's error key. Must be balanced by
// -stopObservingRenderer:.
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
ALLOW_NEW_API_WITHOUT_GUARDS_END
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO entry point; may fire on any thread, so the error is bounced to the
// main thread before notifying the parent.
// NOTE(review): unlike -layerFailedToDecode:, the main-thread blocks here
// dereference _parent without a null check; if -invalidate runs before the
// block executes, _parent is null — confirm the owner's lifetime guarantees.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualTo:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
                protectedSelf->_parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
            });
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualTo:@"error"]);

        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
            protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

// Notification handler for decode failures; re-checks parent and layer
// registration on the main thread before forwarding.
- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
        if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
            return;
        protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
394
395 #pragma mark -
396
// Weakly wraps a SourceBufferPrivateAVFObjC so it can travel inside the
// CoreMedia buffer-attachment dictionary read by bufferWasConsumedCallback.
@interface WebBufferConsumedContext : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
}
@property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
@end
402
@implementation WebBufferConsumedContext
// Stores the weak back-reference to the owning source buffer.
- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;
    return self;
}

@dynamic parent;
// Resolves the weak reference; returns null once the owner is gone.
- (WebCore::SourceBufferPrivateAVFObjC*)parent
{
    return _parent.get();
}
@end
418
419 namespace WebCore {
420 using namespace PAL;
421
422 #pragma mark -
423 #pragma mark MediaDescriptionAVFObjC
424
// MediaDescription backed by an AVAssetTrack: snapshots the track's
// audio/video/text characteristics and its codec four-char code at
// construction time.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    AtomicString codec() const override { return m_codec; }
    bool isVideo() const override { return m_isVideo; }
    bool isAudio() const override { return m_isAudio; }
    bool isText() const override { return m_isText; }
    
protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        // Derive the codec name from the first format description's media
        // subtype; m_codec stays null when the track has none.
        // NOTE(review): the AtomicString is built from the FourCharCode's raw
        // in-memory bytes, so the character order depends on host byte order —
        // confirm consumers expect this spelling.
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
454
455 #pragma mark -
456 #pragma mark SourceBufferPrivateAVFObjC
457
// Key under which a WebBufferConsumedContext is stored in the notification
// payload dictionary.
static NSString *kBufferConsumedContext = @"BufferConsumedContext";

// CoreMedia notification callback fired when a sample buffer handed to the
// display layer has been consumed. May fire on any thread; it re-dispatches
// itself to the main thread (retaining the payload) before touching WebCore
// state, then resolves the weak context back to the owning source buffer.
static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
{
    if (!isMainThread()) {
        callOnMainThread([notificationName, payload = retainPtr(payload)] {
            bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
        });
        return;
    }

    if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
        return;

    ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
    WebBufferConsumedContext *context = [(__bridge NSDictionary *)payload valueForKey:kBufferConsumedContext];
    if (!context)
        return;

    // context.parent is null if the source buffer has already been destroyed.
    if (auto sourceBuffer = context.parent)
        sourceBuffer->bufferWasConsumed();
}
480
// Factory wrapping the private constructor; callers receive an adopted
// (refcount-owned) instance.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    auto* buffer = new SourceBufferPrivateAVFObjC(parent);
    return adoptRef(buffer);
}
485
// Creates the AVStreamDataParser, its main-thread-forwarding delegate, and the
// KVO error listener; registers for CoreMedia buffer-consumed notifications.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
    , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
    , m_mediaSource(parent)
{
    CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
    // The delegate's abort semaphore is installed here (and replaced with a
    // fresh one each time abort() runs).
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
}
496
// Tears down the parser, renderers, and CoreMedia listener registration.
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    ASSERT(!m_client);
    destroyParser();
    destroyRenderers();

    CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);

    // Unblock a parser-queue thread that may still be waiting for a CDM
    // session (see didProvideContentKeyRequestInitializationDataForTrackID).
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
}
508
// Main-thread continuation of the parser's "parsed an asset" callback. Builds
// an InitializationSegment (duration plus audio/video track descriptions) from
// the asset and hands it to the client. Rejects the append when hardware
// decode requirements are not met.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    if (!m_mediaSource)
        return;

    // Fail the whole append if any track requires unavailable hardware decode.
    if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
        for (AVAssetTrack *track in [asset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
                m_parsingSucceeded = false;
                return;
            }
        }
    }

    m_asset = asset;

    // A new initialization segment replaces any previously discovered tracks.
    m_videoTracks.clear();
    m_audioTracks.clear();

    m_discardSamplesUntilNextInitializationSegment = false;

    SourceBufferPrivateClient::InitializationSegment segment;

    // Prefer overallDurationHint when available and valid; otherwise fall back
    // to the asset's computed duration.
    if ([m_asset respondsToSelector:@selector(overallDurationHint)])
        segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);

    if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
        segment.duration = PAL::toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
            // FIXME(125161): Handle in-band text tracks.
            continue;
        }

        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
}
571
// Main-thread continuation of the parser's failure callback; records the
// failure so appendCompleted() reports ParsingFailed.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
581
// Bundles the arguments of a coded-frame callback.
// NOTE(review): not referenced anywhere in the visible portion of this file,
// and the reference member makes it unsafe to store beyond the caller's
// scope — confirm it is still needed.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
587
// Main-thread continuation of the parser's media-data callback; the
// processCodedFrame() result (accepted/dropped) is deliberately ignored here.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
{
    processCodedFrame(trackID, sampleBuffer, mediaType);
}
592
// Wraps a parsed sample in a MediaSampleAVFObjC and forwards it to the client.
// Returns false when the sample is dropped (unknown track, or samples are
// being discarded until the next initialization segment after a parser reset).
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
        // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
        // will just confuse its state. Drop this sample until we can handle text tracks properly.
        return false;
    }

    if (m_discardSamplesUntilNextInitializationSegment)
        return false;

    if (m_client) {
        Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
        m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
    }

    return true;
}
612
// Stub: end-of-track handling is not implemented yet.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
{
    notImplemented();
}
617
// Runs on the main thread while the parser queue blocks (see the listener's
// synchronous wait). Under legacy EME, records the protected track and
// attaches the parser to the current CDM session — or, when content-key
// sessions are unavailable, to the player's AVStreamSession — before the
// delegate callback is allowed to return.
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;

    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
        session->addParser(m_parser.get());
    else if (!CDMSessionAVContentKeySession::isAvailable()) {
        // The SPI call may throw; swallow ObjC exceptions per WebKit practice.
        BEGIN_BLOCK_OBJC_EXCEPTIONS;
        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
        END_BLOCK_OBJC_EXCEPTIONS;
    }
#else
    UNUSED_PARAM(trackID);
#endif
}
640
// Main-thread handler for encrypted init data. Under legacy EME it surfaces a
// key-needed event; if a CDM session already exists the parser is attached and
// the waiting parser queue is released immediately, otherwise the semaphore is
// stashed so it can be signalled once a session arrives (or on abort/destroy).
// The trailing UNUSED_PARAMs cover configurations where both feature blocks
// compile out.
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, Box<BinarySemaphore> hasSessionSemaphore)
{
    if (!m_mediaSource)
        return;

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;
    RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
    [initData getBytes:initDataArray->data() length:initDataArray->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
    if (auto session = m_mediaSource->player()->cdmSession()) {
        session->addParser(m_parser.get());
        hasSessionSemaphore->signal();
    } else {
        // Release any previously stashed waiter before replacing it.
        if (m_hasSessionSemaphore)
            m_hasSessionSemaphore->signal();
        m_hasSessionSemaphore = hasSessionSemaphore;
    }
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_mediaSource) {
        auto initDataBuffer = SharedBuffer::create(initData);
        m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
    }
#endif

    UNUSED_PARAM(initData);
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(hasSessionSemaphore);
}
673
// Sets (or clears, with nullptr) the client notified of parsing results.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
678
// Returns the shared concurrent queue on which AVStreamDataParser append
// operations run; created lazily and thread-safely via dispatch_once.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        sharedQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return sharedQueue;
}
688
// Hands the appended bytes to the AVStreamDataParser on the shared concurrent
// parser queue, then reports completion back on the main thread through a
// revocable WeakPtr. m_isAppendingGroup tracks in-flight appends so abort()
// can wait for them to drain.
void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data.data(), data.size());

    // FIXME: Avoid the data copy by wrapping around the Vector<> object.
    RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
    // Keep the parser and its delegate alive for the duration of the async work.
    RetainPtr<AVStreamDataParser> parser = m_parser;
    RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;

    m_parsingSucceeded = true;
    dispatch_group_enter(m_isAppendingGroup.get());

    dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
        // The first append after resetParserState() is flagged as a stream
        // discontinuity.
        if (parserStateWasReset)
            [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
        else
            [parser appendStreamData:nsData.get()];

        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->appendCompleted();
        });
        dispatch_group_leave(isAppendingGroup.get());
    });
    m_parserStateWasReset = false;
}
716
// Runs on the main thread after an append has been parsed; reports progress to
// the player and the final outcome to the client. (setLoadingProgresssed is
// the upstream API's spelling.)
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    bool parsedOK = m_parsingSucceeded;

    if (parsedOK && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (m_client) {
        auto outcome = parsedOK ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed;
        m_client->sourceBufferPrivateAppendComplete(outcome);
    }
}
725
// Cancels any in-flight append: signals both semaphores the parser queue may
// be blocked on, waits for pending parse work to drain, then revokes all
// outstanding completion WeakPtrs and installs fresh parent/abort-semaphore
// state on the delegate so late callbacks from the aborted append are ignored.
void SourceBufferPrivateAVFObjC::abort()
{
    // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
    // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
    // semaphore, the m_isAppendingGroup wait operation will deadlock.
    if (m_hasSessionSemaphore)
        m_hasSessionSemaphore->signal();
    m_delegate.get().abortSemaphore->signal();
    dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    m_appendWeakFactory.revokeAll();
    m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
    m_delegate.get().abortSemaphore = Box<Semaphore>::create(0);
}
739
// Marks the parser state as reset: the next append is flagged as a stream
// discontinuity (see append()), and samples are dropped until the next
// initialization segment arrives (see processCodedFrame()).
void SourceBufferPrivateAVFObjC::resetParserState()
{
    m_parserStateWasReset = true;
    m_discardSamplesUntilNextInitializationSegment = true;
}
745
// Detaches the parser from any CDM machinery (legacy stream session and/or
// content-key session) and invalidates the delegate so no further callbacks
// reach this object.
void SourceBufferPrivateAVFObjC::destroyParser()
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
#endif
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
#endif

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
761
// Drops the video layer, decompression session, and all audio renderers,
// flushing each renderer and unhooking its error observation first.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    m_audioRenderers.clear();
}
780
// Full detach: destroy the parser and all renderers, then unregister this
// buffer from its owning media source.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyParser();
    destroyRenderers();

    if (!m_mediaSource)
        return;

    m_mediaSource->removeSourceBuffer(this);
}
789
// Returns the owning player's ready state, or HaveNothing when this buffer
// has been detached from its media source.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
794
// Forwards a ready-state change to the owning player; no-op when detached.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setReadyState(readyState);
}
800
// True when a client is attached and it reports buffered video.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasVideo();
}
805
// True when some video track is currently selected; -1 is the sentinel for
// "no enabled video track" (see trackDidChangeEnabled).
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    return m_enabledVideoTrackID != -1;
}
810
// True when a client is attached and it reports buffered audio.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasAudio();
}
815
// Responds to a video track's selection state changing: routes sample
// delivery for the track to/from the parser and decompression session, then
// notifies the media source that the selected-video state may have changed.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // The currently-enabled track was deselected: stop sample delivery.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        // A track became selected: enable sample delivery and ask for data.
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // Null-check m_mediaSource for consistency with every other use in this
    // file; it is cleared when this buffer is removed from its media source.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
838
// Responds to an audio track's enabled state changing. Lazily creates one
// AVSampleBufferAudioRenderer per enabled track and registers/unregisters it
// with the player; renderers are kept in m_audioRenderers across toggles.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        // Note: the renderer stays in m_audioRenderers so it can be reused if
        // the track is re-enabled.
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            // First time this track is enabled: create a renderer, wire up its
            // ready-for-data callback on the main queue, and observe errors.
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            auto weakThis = createWeakPtr();
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
871
// Associates this buffer with a (legacy EME) CDM session. Unblocks any append
// that was parked waiting for a session, and replays a deferred HDCP error to
// the new session if one was recorded before the session existed.
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = session;

    if (m_session) {
        m_session->addSourceBuffer(this);
        // The parsing queue may be waiting on this semaphore for a session to
        // become available (see abort()); release it now that one exists.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }

        if (m_hdcpError) {
            // Deliver the stored HDCP error asynchronously; the session may
            // have been cleared again by the time the task runs, hence the
            // weak-pointer re-checks.
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
905
// Associates this buffer with a (modern EME) FairPlay CDM instance, moving the
// parser between content key sessions and unblocking any append waiting for a
// session to appear.
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (!fpsInstance || fpsInstance == m_cdmInstance)
        return;

    // Unregister the parser from the previous instance's key session before
    // registering with the new one.
    if (m_cdmInstance)
        [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];

    m_cdmInstance = fpsInstance;

    if (m_cdmInstance) {
        [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_parser.get()];
        // The parsing queue may be blocked waiting for a session (see abort());
        // release it now that a CDM instance is available.
        if (m_hasSessionSemaphore) {
            m_hasSessionSemaphore->signal();
            m_hasSessionSemaphore = nullptr;
        }
    }
#else
    UNUSED_PARAM(instance);
#endif
}
929
// Flushes every rendering endpoint: the video path and each audio renderer.
void SourceBufferPrivateAVFObjC::flush()
{
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
937
// Adds an error client; each client gets a chance to observe (and suppress)
// layer/renderer errors. Double registration is a programming error.
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
943
// Removes a previously registered error client. Unregistering a client that
// was never registered is a programming error.
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(m_errorClients.contains(client));
    size_t clientIndex = m_errorClients.find(client);
    m_errorClients.remove(clientIndex);
}
949
// Handles an error reported by the display layer. Error clients are polled
// first; if any of them claims the error, it is swallowed. Otherwise the
// OSStatus code embedded in the error's userInfo is forwarded to the client.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->layerDidReceiveError(layer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;

    // valueForKey: returns nil when the key is absent; intValue on nil is 0.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
969
// Called when output becomes obscured because the external display lacks
// sufficient content protection (HDCP). With a modern CDM instance the player
// handles it; otherwise a synthetic 'HDCP' error is routed through the legacy
// layer-error path.
// NOTE(review): when EME is compiled in but no CDM instance exists, execution
// falls through to the synthetic-error path below — presumably intentional for
// the legacy-EME case; confirm.
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_mediaSource->player()->cdmInstance()) {
        m_mediaSource->player()->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    // 'HDCP' is a four-character code used as the sentinel error code; see
    // rendererDidReceiveError, which matches on the same value.
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
984
// Handles an error reported by an audio renderer. HDCP errors are remembered
// so they can be replayed to a CDM session attached later (see setCDMSession).
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);

    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    // The early return is currently a no-op at the end of the function; it is
    // kept for symmetry with layerDidReceiveError and the FIXME above.
    if (anyIgnored)
        return;
}
1004
// Flushes the rendering endpoint for a single track: the video path when the
// track is the enabled video track, or the matching audio renderer.
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);

    if (trackID == m_enabledVideoTrackID) {
        flushVideo();
        return;
    }

    if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1015
// Flushes all pending video: clears the display layer and decompression
// session, invalidates the cached natural size, and resets the player's
// "has a displayable frame" state until a new frame arrives.
void SourceBufferPrivateAVFObjC::flushVideo()
{
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        // Re-arm the notification so the player learns when the first
        // post-flush frame becomes available.
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // The natural size is re-derived from the next enqueued sample.
    m_cachedSize = std::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1035
// Flushes a single audio renderer and marks the player as having no available
// audio sample for it until a new one is enqueued.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    [renderer flush];

    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1045
// Enqueues one parsed media sample onto the appropriate rendering endpoint.
// Video samples go to the decompression session and/or display layer (with
// natural-size change detection); audio samples go to the track's renderer.
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    // Ignore samples for tracks we are not actively rendering.
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes from the sample's format description.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            // Pass `this` for the %p conversion; previously it was missing, so
            // every argument was shifted by one and the final %lf read past
            // the provided argument list.
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                // The first size is applied immediately; later changes are
                // scheduled to take effect at the sample's presentation time.
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (m_displayLayer) {
            if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
                // Attach a consumed-notification to a copy of the first
                // displayable sample so bufferWasConsumed() can tell the player
                // a frame is actually available.
                auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
                CMSampleBufferRef rawSampleCopy;
                CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
                auto sampleCopy = adoptCF(rawSampleCopy);
                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
            } else
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        }
    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1094
// Invoked (via WebBufferConsumedContext) when the display layer consumes the
// tagged sample; tells the player a video frame is now displayable.
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1100
// Returns whether the rendering endpoint for the given track can accept more
// samples. Unknown tracks report not-ready.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    if (trackID == m_enabledVideoTrackID) {
        // The decompression session, when present, gates video readiness;
        // otherwise defer to the display layer.
        if (m_decompressionSession)
            return m_decompressionSession->isReadyForMoreMediaData();
        return [m_displayLayer isReadyForMoreMediaData];
    }

    if (!m_audioRenderers.contains(trackID))
        return false;

    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1116
// Propagates this buffer's active state to the owning media source.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1122
// Asks the client for the nearest seekable time within the given thresholds;
// with no client attached, the requested time is returned unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1129
// Called before a seek begins; flushes all rendering endpoints so stale
// samples are not displayed at the new position.
void SourceBufferPrivateAVFObjC::willSeek()
{
    flush();
}
1134
// Forwards a seek request to the client; no-op without one.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;

    m_client->sourceBufferPrivateSeekToTime(time);
}
1140
// Returns the last presentation size derived from an enqueued video sample,
// or an empty size when none has been seen since the last flush.
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    if (m_cachedSize)
        return m_cachedSize.value();
    return { };
}
1145
// One-shot ready-for-data notification: stops further readiness callbacks for
// the track's endpoint(s) and tells the client it may provide more samples.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);
    if (trackID == m_enabledVideoTrackID) {
        // Cancel both video callbacks; notifyClientWhenReadyForMoreSamples
        // re-arms them when the client wants another notification.
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else if (m_audioRenderers.contains(trackID))
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    else
        return;  // Unknown track: do not notify the client.

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1161
// Arms a one-shot readiness callback on the track's rendering endpoint(s);
// when the endpoint can accept data, didBecomeReadyForMoreSamples fires on the
// main queue and disarms the callback again.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        // Both video endpoints are armed; whichever fires first stops both
        // (see didBecomeReadyForMoreSamples).
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            // Capture weakly: the layer may outlive this source buffer.
            auto weakThis = createWeakPtr();
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        auto weakThis = createWeakPtr();
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1186
// A type switch is possible whenever the media-source engine reports any
// level of support for the content type.
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    MediaEngineSupportParameters parameters;
    parameters.type = contentType;
    parameters.isMediaSource = true;
    return MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters) != MediaPlayer::IsNotSupported;
}
1194
// Swaps the display layer used for video output. The outgoing layer is
// flushed and unobserved; the incoming layer is wired for readiness callbacks
// and error observation, and the client is asked to re-enqueue samples.
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        // Capture weakly: the layer may outlive this source buffer.
        auto weakThis = createWeakPtr();
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        // Previously-appended samples must be re-enqueued onto the new layer.
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1221
// Swaps the decompression session used for video decode. The outgoing session
// is stopped and invalidated; the incoming one is armed for readiness and
// frame-availability callbacks, and the client re-enqueues samples into it.
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    // Capture weakly: callbacks may fire after this buffer is destroyed.
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    // Previously-appended samples must be re-enqueued into the new session.
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1249
1250 }
1251
1252 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)