4cf2753ae20f39e97b9cd2a0879a73c32da3b68b
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
33 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
34 #import "CDMSessionAVContentKeySession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "InbandTextTrackPrivateAVFObjC.h"
37 #import "Logging.h"
38 #import "MediaDescription.h"
39 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
40 #import "MediaSample.h"
41 #import "MediaSampleAVFObjC.h"
42 #import "MediaSourcePrivateAVFObjC.h"
43 #import "NotImplemented.h"
44 #import "SharedBuffer.h"
45 #import "SourceBufferPrivateClient.h"
46 #import "TimeRanges.h"
47 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
48 #import "WebCoreDecompressionSession.h"
49 #import <AVFoundation/AVAssetTrack.h>
50 #import <JavaScriptCore/TypedArrayInlines.h>
51 #import <QuartzCore/CALayer.h>
52 #import <objc/runtime.h>
53 #import <pal/avfoundation/MediaTimeAVFoundation.h>
54 #import <pal/spi/mac/AVFoundationSPI.h>
55 #import <wtf/BlockObjCExceptions.h>
56 #import <wtf/HashCountedSet.h>
57 #import <wtf/MainThread.h>
58 #import <wtf/SoftLinking.h>
59 #import <wtf/WeakPtr.h>
60 #import <wtf/text/AtomicString.h>
61 #import <wtf/text/CString.h>
62
63 #pragma mark - Soft Linking
64
65 #import <pal/cf/CoreMediaSoftLink.h>
66
// Soft-link AVFoundation and the classes/constants used below so this file has
// no hard link-time dependency on the framework (it may be unavailable).
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
SOFT_LINK_CLASS(AVFoundation, AVStreamSession)

SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)

// Map the familiar AVFoundation spellings onto the soft-linked getters so the
// code below can use the constants by their normal names.
#define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
#define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()

#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
89
90 #pragma mark -
91 #pragma mark AVStreamSession
92
// Minimal declaration of the AVStreamSession SPI (not in the public
// AVFoundation headers); only the two parser-registration methods used in this
// file are declared.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
97
98 #pragma mark -
99 #pragma mark WebAVStreamDataParserListener
100
// Bridges AVStreamDataParser delegate callbacks (delivered on a background
// parsing queue) to the owning SourceBufferPrivateAVFObjC on the main thread.
@interface WebAVStreamDataParserListener : NSObject<AVStreamDataParserOutputHandling> {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent; // nulled automatically when the source buffer dies
    OSObjectPtr<dispatch_semaphore_t> _abortSemaphore; // signalled by abort() to unblock the synchronous waits below
    AVStreamDataParser* _parser; // raw pointer; the owner holds the retain (see m_parser)
}
@property (assign) WeakPtr<WebCore::SourceBufferPrivateAVFObjC> parent;
@property (assign) OSObjectPtr<dispatch_semaphore_t> abortSemaphore;
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
@end
110
@implementation WebAVStreamDataParserListener
// Designated initializer: registers self as the parser's delegate.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

@synthesize parent=_parent;
@synthesize abortSemaphore=_abortSemaphore;

- (void)dealloc
{
    // Detach from the parser so it cannot message a deallocated delegate.
    [_parser setDelegate:nil];
    [super dealloc];
}

- (void)invalidate
{
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// All streamDataParser:* callbacks below arrive on the background parsing
// queue. Each bounces its payload to the main thread, where the WeakPtr parent
// is checked before use (it becomes null once the source buffer is destroyed).
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset*> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    // The discontinuity flag is deliberately ignored; both delegate variants
    // funnel into the same didParseStreamDataAsAsset handler.
    UNUSED_PARAM(discontinuity);
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<AVAsset*> protectedAsset = asset;
    callOnMainThread([parent = _parent, protectedAsset = WTFMove(protectedAsset)] {
        if (parent)
            parent->didParseStreamDataAsAsset(protectedAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<NSError> protectedError = error;
    callOnMainThread([parent = _parent, protectedError = WTFMove(protectedError)] {
        if (parent)
            parent->didFailToParseStreamDataWithError(protectedError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(AVStreamDataParserOutputMediaDataFlags)flags
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    RetainPtr<CMSampleBufferRef> protectedSample = sample;
    callOnMainThread([parent = _parent, protectedSample = WTFMove(protectedSample), trackID, mediaType = String(nsMediaType), flags] {
        if (parent)
            parent->didProvideMediaDataForTrackID(trackID, protectedSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    callOnMainThread([parent = _parent, trackID, mediaType = String(nsMediaType)] {
        if (parent)
            parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    OSObjectPtr<dispatch_semaphore_t> respondedSemaphore = adoptOSObject(dispatch_semaphore_create(0));
    callOnMainThread([parent = _parent, trackID, respondedSemaphore]() {
        if (parent)
            parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
        dispatch_semaphore_signal(respondedSemaphore.get());
    });

    // Poll both semaphores (100ms timeouts) instead of blocking indefinitely so
    // a concurrent abort() can break the wait. After a successful wait on the
    // abort semaphore it is re-signalled so later waiters also observe the abort.
    while (true) {
        if (!dispatch_semaphore_wait(respondedSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100)))
            return;

        if (!dispatch_semaphore_wait(_abortSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100))) {
            dispatch_semaphore_signal(_abortSemaphore.get());
            return;
        }
    }
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
{
    ASSERT_UNUSED(streamDataParser, streamDataParser == _parser);

    // Block the parsing queue until the main thread has a CDM session (or an
    // abort occurs); same polling pattern as the method above.
    OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore = adoptOSObject(dispatch_semaphore_create(0));
    callOnMainThread([parent = _parent, protectedInitData = RetainPtr<NSData>(initData), trackID, hasSessionSemaphore] {
        if (parent)
            parent->didProvideContentKeyRequestInitializationDataForTrackID(protectedInitData.get(), trackID, hasSessionSemaphore);
    });

    while (true) {
        if (!dispatch_semaphore_wait(hasSessionSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100)))
            return;

        if (!dispatch_semaphore_wait(_abortSemaphore.get(), dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_MSEC * 100))) {
            dispatch_semaphore_signal(_abortSemaphore.get());
            return;
        }
    }
}
@end
240
// Observes "error" (and related) KVO notifications on sample-buffer display
// layers and audio renderers, forwarding failures to the owning
// SourceBufferPrivateAVFObjC on the main thread.
@interface WebAVSampleBufferErrorListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent; // raw pointer; cleared by -invalidate
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
    ALLOW_NEW_API_WITHOUT_GUARDS_END
}

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
ALLOW_NEW_API_WITHOUT_GUARDS_END
@end
258
@implementation WebAVSampleBufferErrorListener

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Unregisters every KVO/notification observation and severs the parent link.
// Safe to call multiple times; runs on the main thread.
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
ALLOW_NEW_API_WITHOUT_GUARDS_END
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO callback; may arrive on a non-main thread, so the error is forwarded to
// the parent via callOnMainThread.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualTo:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
                // -invalidate may have run on the main thread between the KVO
                // notification and this task; bail out rather than dereference a
                // null _parent (same guard as -layerFailedToDecode: below).
                if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
                    return;
                protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
            callOnMainThread([protectedSelf = WTFMove(protectedSelf), obscured = [[change valueForKey:NSKeyValueChangeNewKey] boolValue]] {
                if (!protectedSelf->_parent)
                    return;
                protectedSelf->_parent->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
            });
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualTo:@"error"]);

        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), error = WTFMove(error)] {
            // Same invalidation race as above: re-check parent and membership.
            if (!protectedSelf->_parent || !protectedSelf->_renderers.contains(renderer.get()))
                return;
            protectedSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];

    RetainPtr<WebAVSampleBufferErrorListener> protectedSelf = self;
    callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), error = WTFMove(error)] {
        if (!protectedSelf->_parent || !protectedSelf->_layers.contains(layer.get()))
            return;
        protectedSelf->_parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
393
394 #pragma mark -
395
// Objective-C wrapper around a WeakPtr to the source buffer; stored in the
// payload of CoreMedia buffer-consumed notifications so bufferWasConsumedCallback
// can route back to the right SourceBufferPrivateAVFObjC.
@interface WebBufferConsumedContext : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
}
@property (readonly) WebCore::SourceBufferPrivateAVFObjC* parent;
@end
401
@implementation WebBufferConsumedContext

- (id)initWithParent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;
    return self;
}

@dynamic parent;

- (WebCore::SourceBufferPrivateAVFObjC*)parent
{
    // Resolves the weak reference; returns null once the source buffer is gone.
    return _parent.get();
}

@end
417
418 namespace WebCore {
419 using namespace PAL;
420
421 #pragma mark -
422 #pragma mark MediaDescriptionAVFObjC
423
// MediaDescription backed by an AVAssetTrack: captures the track's kind
// (video/audio/text, from its media characteristics) and the FourCC of its
// first format description, all at construction time.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    AtomicString codec() const override { return m_codec; }
    bool isVideo() const override { return m_isVideo; }
    bool isAudio() const override { return m_isAudio; }
    bool isText() const override { return m_isText; }
    
protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // Expose the four-byte media subtype as a 4-character string.
            // NOTE(review): this reads the FourCharCode's in-memory bytes
            // directly, so character order depends on host endianness — confirm
            // this matches what codec() consumers expect.
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
453
454 #pragma mark -
455 #pragma mark SourceBufferPrivateAVFObjC
456
// Key under which a WebBufferConsumedContext is stored in the payload of
// kCMSampleBufferConsumerNotification_BufferConsumed notifications.
static NSString *kBufferConsumedContext = @"BufferConsumedContext";

// CoreMedia notification callback: routes a "buffer consumed" notification to
// the SourceBufferPrivateAVFObjC identified by the context in the payload.
static void bufferWasConsumedCallback(CMNotificationCenterRef, const void*, CFStringRef notificationName, const void*, CFTypeRef payload)
{
    // The notification may be delivered off the main thread; bounce to the main
    // thread (retaining the payload) and re-enter this function there.
    if (!isMainThread()) {
        callOnMainThread([notificationName, payload = retainPtr(payload)] {
            bufferWasConsumedCallback(nullptr, nullptr, notificationName, nullptr, payload.get());
        });
        return;
    }

    if (!CFEqual(kCMSampleBufferConsumerNotification_BufferConsumed, notificationName))
        return;

    ASSERT(CFGetTypeID(payload) == CFDictionaryGetTypeID());
    WebBufferConsumedContext *context = [(__bridge NSDictionary *)payload valueForKey:kBufferConsumedContext];
    if (!context)
        return;

    // context.parent resolves a WeakPtr; null if the source buffer has been destroyed.
    if (auto sourceBuffer = context.parent)
        sourceBuffer->bufferWasConsumed();
}
479
// Factory: creates a source buffer bound to (but not owning) |parent|.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    return adoptRef(new SourceBufferPrivateAVFObjC(parent));
}
484
// Creates the stream parser, its main-thread-bridging delegate, and the error
// listener, and subscribes to CoreMedia "buffer consumed" notifications.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
    , m_isAppendingGroup(adoptOSObject(dispatch_group_create()))
    , m_mediaSource(parent)
{
    // `this` doubles as the listener token; removed in the destructor.
    CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr, 0);
    m_delegate.get().abortSemaphore = adoptOSObject(dispatch_semaphore_create(0));
}
495
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    ASSERT(!m_client);
    destroyParser();
    destroyRenderers();

    CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, bufferWasConsumedCallback, kCMSampleBufferConsumerNotification_BufferConsumed, nullptr);

    // Unblock a parser-queue thread that may still be waiting for a CDM session.
    if (m_hasSessionSemaphore)
        dispatch_semaphore_signal(m_hasSessionSemaphore.get());
}
507
// Main-thread handler for a parsed initialization segment: captures the asset,
// rebuilds the track lists, and forwards an InitializationSegment to the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    if (!m_mediaSource)
        return;

    // Fail the whole parse if any track requires hardware decode support that is unavailable.
    if (m_mediaSource->player()->shouldCheckHardwareSupport()) {
        for (AVAssetTrack *track in [asset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, m_mediaSource->player()->mediaContentTypesRequiringHardwareSupport())) {
                m_parsingSucceeded = false;
                return;
            }
        }
    }

    m_asset = asset;

    m_videoTracks.clear();
    m_audioTracks.clear();

    // A new initialization segment re-enables sample processing (see processCodedFrame()).
    m_discardSamplesUntilNextInitializationSegment = false;

    SourceBufferPrivateClient::InitializationSegment segment;

    // Prefer the overall duration hint when available; fall back to the asset
    // duration when the hint is missing, invalid, or zero.
    if ([m_asset respondsToSelector:@selector(overallDurationHint)])
        segment.duration = PAL::toMediaTime([m_asset overallDurationHint]);

    if (segment.duration.isInvalid() || segment.duration == MediaTime::zeroTime())
        segment.duration = PAL::toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
            // FIXME(125161): Handle in-band text tracks.
            continue;
        }

        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(segment);
}
570
// Records a parse failure; the client is notified when the in-flight append
// completes (see appendCompleted()).
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError *error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
580
// NOTE(review): appears unused in the visible portion of this file; the
// reference member would dangle if an instance outlived the String it was
// built from — verify before reusing this struct.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
586
// Main-thread handler for parsed media samples (bounced here by
// WebAVStreamDataParserListener); the parser output flags are ignored.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned)
{
    processCodedFrame(trackID, sampleBuffer, mediaType);
}
591
// Wraps a parsed sample and hands it to the client. Returns false when the
// sample was dropped (unknown track, or awaiting a new initialization segment).
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    // Only samples for the enabled video track or a known audio renderer are forwarded.
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID)) {
        // FIXME(125161): We don't handle text tracks, and passing this sample up to SourceBuffer
        // will just confuse its state. Drop this sample until we can handle text tracks properly.
        return false;
    }

    // After resetParserState(), drop samples until the next initialization segment arrives.
    if (m_discardSamplesUntilNextInitializationSegment)
        return false;

    if (m_client) {
        Ref<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(mediaSample.get()).utf8().data());
        m_client->sourceBufferPrivateDidReceiveSample(mediaSample);
    }

    return true;
}
611
// End-of-track notifications are currently ignored.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int, const String&)
{
    notImplemented();
}
616
// Runs on the main thread while the parser queue blocks (see the delegate's
// streamDataParserWillProvideContentKeyRequestInitializationData:), so the
// stream session can be attached to the parser before the delegate call returns.
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;

    // Prefer an existing CDM session; otherwise, when content key sessions are
    // unavailable, fall back to attaching the player's AVStreamSession directly.
    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
        session->addParser(m_parser.get());
    else if (!CDMSessionAVContentKeySession::isAvailable()) {
        BEGIN_BLOCK_OBJC_EXCEPTIONS;
        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
        END_BLOCK_OBJC_EXCEPTIONS;
    }
#else
    UNUSED_PARAM(trackID);
#endif
}
639
// Main-thread handler for encrypted-media init data. The parser queue is
// blocked on |hasSessionSemaphore| until a CDM session is attached (or the
// append is aborted / this object is destroyed).
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore)
{
    if (!m_mediaSource)
        return;

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;
    RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
    [initData getBytes:initDataArray->data() length:initDataArray->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
    // With a session already available, attach the parser and release the
    // blocked parser thread right away; otherwise keep the semaphore (signalling
    // any previously-held one first) and signal it once a session arrives.
    if (auto session = m_mediaSource->player()->cdmSession()) {
        session->addParser(m_parser.get());
        dispatch_semaphore_signal(hasSessionSemaphore.get());
    } else {
        if (m_hasSessionSemaphore)
            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
        m_hasSessionSemaphore = hasSessionSemaphore;
    }
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_mediaSource) {
        auto initDataBuffer = SharedBuffer::create(initData);
        m_mediaSource->player()->initializationDataEncountered("sinf", initDataBuffer->tryCreateArrayBuffer());
    }
#endif

    // Harmless no-ops when the parameters were consumed above; silence unused
    // warnings when both feature guards are compiled out.
    UNUSED_PARAM(initData);
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(hasSessionSemaphore);
}
672
// Sets (or clears, with null) the client that receives parsed segments and samples.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
677
// Shared concurrent queue on which every SourceBufferPrivateAVFObjC runs its
// AVStreamDataParser append work; created lazily, exactly once.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t parserQueue;
    dispatch_once(&createQueueOnce, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
687
// Appends media data to the stream parser. The parse runs on the shared
// concurrent parser queue; completion is reported back on the main thread via
// appendCompleted(). m_isAppendingGroup lets abort() wait for in-flight parses.
void SourceBufferPrivateAVFObjC::append(Vector<unsigned char>&& data)
{
    // Vector::size() is a size_t; log it with %zu — the previous %d mismatched
    // the argument type on LP64 targets.
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%zu", this, data.data(), static_cast<size_t>(data.size()));

    // FIXME: Avoid the data copy by wrapping around the Vector<> object.
    RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data.data() length:data.size()]);
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = m_appendWeakFactory.createWeakPtr(*this);
    RetainPtr<AVStreamDataParser> parser = m_parser;
    RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;

    m_parsingSucceeded = true;
    dispatch_group_enter(m_isAppendingGroup.get());

    dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate, isAppendingGroup = m_isAppendingGroup, parserStateWasReset = m_parserStateWasReset] {
        // After resetParserState(), tell the parser the stream is discontinuous.
        if (parserStateWasReset)
            [parser appendStreamData:nsData.get() withFlags:AVStreamDataParserStreamDataDiscontinuity];
        else
            [parser appendStreamData:nsData.get()];

        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->appendCompleted();
        });
        dispatch_group_leave(isAppendingGroup.get());
    });
    m_parserStateWasReset = false;
}
715
// Called on the main thread after each append finishes on the parser queue;
// reports the parse result to the client.
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    if (m_parsingSucceeded && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (m_client)
        m_client->sourceBufferPrivateAppendComplete(m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
}
724
// Cancels any in-flight append: wakes the (possibly semaphore-blocked) parser
// queue, waits for it to drain, then revokes outstanding weak pointers so
// stale appendCompleted() callbacks become no-ops.
void SourceBufferPrivateAVFObjC::abort()
{
    // The parsing queue may be blocked waiting for the main thread to provide it a AVStreamSession. We
    // were asked to abort, and that cancels all outstanding append operations. Without cancelling this
    // semaphore, the m_isAppendingGroup wait operation will deadlock.
    if (m_hasSessionSemaphore)
        dispatch_semaphore_signal(m_hasSessionSemaphore.get());
    dispatch_semaphore_signal(m_delegate.get().abortSemaphore.get());
    dispatch_group_wait(m_isAppendingGroup.get(), DISPATCH_TIME_FOREVER);
    m_appendWeakFactory.revokeAll();
    // Re-arm the delegate with a fresh WeakPtr and a fresh (unsignalled) abort semaphore.
    m_delegate.get().parent = m_appendWeakFactory.createWeakPtr(*this);
    m_delegate.get().abortSemaphore = adoptOSObject(dispatch_semaphore_create(0));
}
738
// Marks parser state dirty: the next append is flagged as a discontinuity and
// incoming samples are dropped until a new initialization segment is parsed.
void SourceBufferPrivateAVFObjC::resetParserState()
{
    m_parserStateWasReset = true;
    m_discardSamplesUntilNextInitializationSegment = true;
}
744
// Detaches the parser from any DRM session/recipient and invalidates the
// delegate so no further callbacks reach this object.
void SourceBufferPrivateAVFObjC::destroyParser()
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];
#endif
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];
#endif

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
760
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    // Drop the video outputs via their setters so their teardown logic runs.
    if (m_displayLayer)
        setVideoLayer(nullptr);

    if (m_decompressionSession)
        setDecompressionSession(nullptr);

    // Tear down every audio renderer: detach it from the player, flush pending
    // samples, stop ready-for-data callbacks, and stop error observation.
    for (auto& renderer : m_audioRenderers.values()) {
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer.get()];
    }

    m_audioRenderers.clear();
}
779
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    // Release parsing and rendering resources before detaching from the media source.
    destroyParser();
    destroyRenderers();

    if (!m_mediaSource)
        return;
    m_mediaSource->removeSourceBuffer(this);
}
788
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    // Without a media source there is no player to ask, so report HaveNothing.
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
793
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    // Forward the ready state to the owning player, if still attached.
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setReadyState(readyState);
}
799
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    // Delegate to the client; with no client attached, report no video.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasVideo();
}
804
bool SourceBufferPrivateAVFObjC::hasSelectedVideo() const
{
    // A track ID of -1 is the sentinel for "no video track currently enabled"
    // (see trackDidChangeEnabled, which assigns it on deselection).
    bool noVideoTrackEnabled = m_enabledVideoTrackID == -1;
    return !noVideoTrackEnabled;
}
809
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    // Delegate to the client; with no client attached, report no audio.
    if (!m_client)
        return false;
    return m_client->sourceBufferPrivateHasAudio();
}
814
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    // Start or stop receiving parsed media data for this video track as its
    // selection state changes, keeping the decompression session in sync.
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];

        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];

        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
    }

    // Null-check m_mediaSource for consistency with every other method in this
    // class: the buffer may already have been removed from its media source
    // (removedFromMediaSource) when this notification arrives.
    if (m_mediaSource)
        m_mediaSource->hasSelectedVideoChanged(*this);
}
837
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    // Attach or detach an AVSampleBufferAudioRenderer as this audio track is
    // enabled or disabled, creating the renderer lazily on first enable.
    int trackID = track->trackID();

    if (!track->enabled()) {
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        ALLOW_NEW_API_WITHOUT_GUARDS_END
        if (!m_audioRenderers.contains(trackID)) {
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            // The ready-for-data block holds a weak pointer so a destroyed source
            // buffer is not called back by the renderer.
            auto weakThis = createWeakPtr();
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers.set(trackID, renderer);
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers.get(trackID);

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
870
void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (session == m_session)
        return;

    // Move this buffer from the previous session (if any) to the new one.
    if (m_session)
        m_session->removeSourceBuffer(this);

    m_session = session;

    if (m_session) {
        m_session->addSourceBuffer(this);
        // The parsing queue may be blocked waiting for a session (see abort());
        // release it now that one is available.
        if (m_hasSessionSemaphore) {
            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
            m_hasSessionSemaphore = nullptr;
        }

        // Replay any HDCP error recorded before a session existed (captured in
        // rendererDidReceiveError) so the new session can handle it.
        if (m_hdcpError) {
            WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
            callOnMainThread([weakThis] {
                if (!weakThis || !weakThis->m_session || !weakThis->m_hdcpError)
                    return;

                bool ignored = false;
                weakThis->m_session->layerDidReceiveError(nullptr, weakThis->m_hdcpError.get(), ignored);
            });
        }
    }
#else
    UNUSED_PARAM(session);
#endif
}
904
void SourceBufferPrivateAVFObjC::setCDMInstance(CDMInstance* instance)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are handled; a null or unchanged
    // instance is a no-op.
    auto* fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (!fpsInstance || fpsInstance == m_cdmInstance)
        return;

    // Re-register the stream parser as a content key recipient with the new instance.
    if (m_cdmInstance)
        [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_parser.get()];

    m_cdmInstance = fpsInstance;

    if (m_cdmInstance) {
        [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_parser.get()];
        // The parsing queue may be blocked waiting for key material (see abort());
        // release it now that a CDM instance is attached.
        if (m_hasSessionSemaphore) {
            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
            m_hasSessionSemaphore = nullptr;
        }
    }
#else
    UNUSED_PARAM(instance);
#endif
}
928
void SourceBufferPrivateAVFObjC::flush()
{
    // Flush the video pipeline first, then every attached audio renderer.
    flushVideo();

    for (auto& audioRenderer : m_audioRenderers.values())
        flush(audioRenderer.get());
}
936
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // A client may register at most once; double registration is a programming error.
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
942
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    // Unregistering a client that never registered is a programming error.
    ASSERT(m_errorClients.contains(client));
    auto position = m_errorClients.find(client);
    m_errorClients.remove(position);
}
948
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    // Every registered client gets to see the error; if any client asks to
    // ignore it, it is not propagated further.
    bool errorWasIgnored = false;
    for (auto& errorClient : m_errorClients) {
        bool clientIgnoresError = false;
        errorClient->layerDidReceiveError(layer, error, clientIgnoresError);
        if (clientIgnoresError)
            errorWasIgnored = true;
    }
    if (errorWasIgnored)
        return;

    // Surface the underlying OSStatus to the client as a rendering error.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(errorCode);
}
968
void SourceBufferPrivateAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // With a modern EME CDM instance attached, let the player surface the
    // obscured state directly; no synthetic HDCP error is needed.
    // NOTE(review): m_mediaSource is dereferenced without a null check here,
    // unlike most methods in this class — confirm callers guarantee attachment.
    if (m_mediaSource->player()->cdmInstance()) {
        m_mediaSource->player()->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
        return;
    }
#else
    UNUSED_PARAM(obscured);
#endif

    // Otherwise report the condition as a synthetic 'HDCP' error on the display
    // layer so legacy error clients can react to it.
    RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
    layerDidReceiveError(m_displayLayer.get(), error.get());
}
983
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);

    // Remember HDCP errors so they can be replayed to a CDM session attached
    // later (see setCDMSession).
    if ([error code] == 'HDCP')
        m_hdcpError = error;

    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
    bool anyIgnored = false;
    for (auto& client : m_errorClients) {
        bool shouldIgnore = false;
        client->rendererDidReceiveError(renderer, error, shouldIgnore);
        anyIgnored |= shouldIgnore;
    }
    if (anyIgnored)
        return;
    // NOTE(review): the early return above is currently a no-op since the
    // function ends here; it only guards error propagation added in the future.
}
1003
void SourceBufferPrivateAVFObjC::flush(const AtomicString& trackIDString)
{
    // Route the flush to either the video pipeline or the matching audio
    // renderer; unknown tracks are ignored.
    int trackID = trackIDString.toInt();
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flush(%p) - trackId: %d", this, trackID);

    if (trackID == m_enabledVideoTrackID)
        flushVideo();
    else if (m_audioRenderers.contains(trackID))
        flush(m_audioRenderers.get(trackID).get());
}
1014
void SourceBufferPrivateAVFObjC::flushVideo()
{
    // Discard enqueued-but-undisplayed video in both output paths.
    [m_displayLayer flush];

    if (m_decompressionSession) {
        m_decompressionSession->flush();
        // Re-arm the "has available frame" notification so the player learns
        // when a new frame becomes displayable after the flush.
        m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
            if (weakThis && weakThis->m_mediaSource)
                weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
        });
    }

    // Forget the cached natural size; it is re-derived from the next sample
    // enqueued (see enqueueSample).
    m_cachedSize = std::nullopt;

    if (m_mediaSource) {
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
        m_mediaSource->player()->flushPendingSizeChanges();
    }
}
1034
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void SourceBufferPrivateAVFObjC::flush(AVSampleBufferAudioRenderer *renderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Drop queued audio and tell the player this renderer no longer has samples.
    [renderer flush];

    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableAudioSample(renderer, false);
}
1044
void SourceBufferPrivateAVFObjC::enqueueSample(Ref<MediaSample>&& sample, const AtomicString& trackIDString)
{
    // Enqueue a parsed sample into the video pipeline (display layer and/or
    // decompression session) or the matching audio renderer. Samples for
    // unknown or disabled tracks are dropped.
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
        return;

    PlatformSample platformSample = sample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(sample.get()).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        // Detect presentation-size changes so the player can update its natural size.
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(platformSample.sample.cmSampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (!m_cachedSize || formatSize != m_cachedSize.value()) {
            // Fix: `this` was missing from the argument list, leaving the format
            // string with a %p conversion and no matching argument.
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            bool sizeWasNull = !m_cachedSize;
            m_cachedSize = formatSize;
            if (m_mediaSource) {
                if (sizeWasNull)
                    m_mediaSource->player()->setNaturalSize(formatSize);
                else
                    m_mediaSource->player()->sizeWillChangeAtTime(sample->presentationTime(), formatSize);
            }
        }

        if (m_decompressionSession)
            m_decompressionSession->enqueueSample(platformSample.sample.cmSampleBuffer);

        if (m_displayLayer) {
            // Until the first displayable frame arrives, attach a consumed-notification
            // context to a copy of the sample so bufferWasConsumed() is invoked when
            // the layer actually displays it.
            if (m_mediaSource && !m_mediaSource->player()->hasAvailableVideoFrame() && !sample->isNonDisplaying()) {
                auto context = adoptNS([[WebBufferConsumedContext alloc] initWithParent:createWeakPtr()]);
                CMSampleBufferRef rawSampleCopy;
                CMSampleBufferCreateCopy(kCFAllocatorDefault, platformSample.sample.cmSampleBuffer, &rawSampleCopy);
                auto sampleCopy = adoptCF(rawSampleCopy);
                CMSetAttachment(sampleCopy.get(), kCMSampleBufferAttachmentKey_PostNotificationWhenConsumed, (__bridge CFDictionaryRef)@{kBufferConsumedContext: context.get()}, kCMAttachmentMode_ShouldNotPropagate);
                [m_displayLayer enqueueSampleBuffer:sampleCopy.get()];
            } else
                [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        }
    } else {
        auto renderer = m_audioRenderers.get(trackID);
        [renderer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        if (m_mediaSource && !sample->isNonDisplaying())
            m_mediaSource->player()->setHasAvailableAudioSample(renderer.get(), true);
    }
}
1093
void SourceBufferPrivateAVFObjC::bufferWasConsumed()
{
    // The display layer consumed a sample; the player now has a displayable frame.
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
1099
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(const AtomicString& trackIDString)
{
    int trackID = trackIDString.toInt();

    // Video readiness comes from the decompression session when one exists,
    // otherwise from the display layer.
    if (trackID == m_enabledVideoTrackID)
        return m_decompressionSession ? m_decompressionSession->isReadyForMoreMediaData() : [m_displayLayer isReadyForMoreMediaData];

    // Audio readiness comes from the renderer for that track, if any.
    if (!m_audioRenderers.contains(trackID))
        return false;
    return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
}
1115
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    // Relay the active-state change to the owning media source, if still attached.
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1121
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    // With no client to consult, the requested time is the best available answer.
    return m_client ? m_client->sourceBufferPrivateFastSeekTimeForMediaTime(time, negativeThreshold, positiveThreshold) : time;
}
1128
void SourceBufferPrivateAVFObjC::willSeek()
{
    // Drop queued-but-undisplayed samples so stale frames are not shown after the seek.
    flush();
}
1133
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    // Forward the seek request to the client, if one is attached.
    if (!m_client)
        return;
    m_client->sourceBufferPrivateSeekToTime(time);
}
1139
FloatSize SourceBufferPrivateAVFObjC::naturalSize()
{
    // Return the size last observed from enqueued video samples (see
    // enqueueSample), or an empty size when no video has been seen yet.
    if (!m_cachedSize)
        return FloatSize();
    return m_cachedSize.value();
}
1144
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(%p) - track(%d)", this, trackID);

    // Stop the ready-for-data callbacks for this track, then hand control back
    // to the client so it can enqueue more samples. Unknown tracks are ignored.
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession)
            m_decompressionSession->stopRequestingMediaData();
        [m_displayLayer stopRequestingMediaData];
    } else {
        if (!m_audioRenderers.contains(trackID))
            return;
        [m_audioRenderers.get(trackID) stopRequestingMediaData];
    }

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(AtomicString::number(trackID));
}
1160
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(const AtomicString& trackIDString)
{
    // Arm ready-for-data callbacks on whichever pipeline handles this track;
    // each callback funnels into didBecomeReadyForMoreSamples().
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        if (m_decompressionSession) {
            m_decompressionSession->requestMediaDataWhenReady([this, trackID] {
                didBecomeReadyForMoreSamples(trackID);
            });
        }
        if (m_displayLayer) {
            // Weak pointer keeps the layer's block from calling a destroyed buffer.
            auto weakThis = createWeakPtr();
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
                if (weakThis)
                    weakThis->didBecomeReadyForMoreSamples(trackID);
            }];
        }
    } else if (m_audioRenderers.contains(trackID)) {
        auto weakThis = createWeakPtr();
        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(trackID);
        }];
    }
}
1185
bool SourceBufferPrivateAVFObjC::canSwitchToType(const ContentType& contentType)
{
    // A switch is possible whenever the media-source player reports any level
    // of support (other than IsNotSupported) for the requested content type.
    MediaEngineSupportParameters parameters;
    parameters.isMediaSource = true;
    parameters.type = contentType;
    auto support = MediaPlayerPrivateMediaSourceAVFObjC::supportsType(parameters);
    return support != MediaPlayer::IsNotSupported;
}
1193
void SourceBufferPrivateAVFObjC::setVideoLayer(AVSampleBufferDisplayLayer* layer)
{
    if (layer == m_displayLayer)
        return;

    ASSERT(!layer || !m_decompressionSession || hasSelectedVideo());

    // Quiesce the outgoing layer before swapping it out.
    if (m_displayLayer) {
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
    }

    m_displayLayer = layer;

    if (m_displayLayer) {
        auto weakThis = createWeakPtr();
        // NOTE(review): m_enabledVideoTrackID inside the block is read through a
        // captured raw `this`; the weakThis check guards the object's lifetime
        // at invocation time.
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
            if (weakThis)
                weakThis->didBecomeReadyForMoreSamples(m_enabledVideoTrackID);
        }];
        [m_errorListener beginObservingLayer:m_displayLayer.get()];
        // Ask the client to re-enqueue samples so the new layer has content.
        if (m_client)
            m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
    }
}
1220
void SourceBufferPrivateAVFObjC::setDecompressionSession(WebCoreDecompressionSession* decompressionSession)
{
    if (m_decompressionSession == decompressionSession)
        return;

    // Shut down the outgoing session before replacing it.
    if (m_decompressionSession) {
        m_decompressionSession->stopRequestingMediaData();
        m_decompressionSession->invalidate();
    }

    m_decompressionSession = decompressionSession;

    if (!m_decompressionSession)
        return;

    // Wire the new session's callbacks through weak pointers so they become
    // no-ops if this buffer is destroyed first.
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    m_decompressionSession->requestMediaDataWhenReady([weakThis] {
        if (weakThis)
            weakThis->didBecomeReadyForMoreSamples(weakThis->m_enabledVideoTrackID);
    });
    m_decompressionSession->notifyWhenHasAvailableVideoFrame([weakThis = createWeakPtr()] {
        if (weakThis && weakThis->m_mediaSource)
            weakThis->m_mediaSource->player()->setHasAvailableVideoFrame(true);
    });
    // Ask the client to re-enqueue existing samples into the new session.
    if (m_client)
        m_client->sourceBufferPrivateReenqueSamples(AtomicString::number(m_enabledVideoTrackID));
}
1248
1249 }
1250
1251 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)