[MSE][Mac] Crash at WebCore::SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "BlockExceptions.h"
32 #import "ExceptionCodePlaceholder.h"
33 #import "Logging.h"
34 #import "MediaDescription.h"
35 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
36 #import "MediaSample.h"
37 #import "MediaSourcePrivateAVFObjC.h"
38 #import "MediaTimeAVFoundation.h"
39 #import "NotImplemented.h"
40 #import "SoftLinking.h"
41 #import "SourceBufferPrivateClient.h"
42 #import "TimeRanges.h"
43 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
44 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import <AVFoundation/AVAssetTrack.h>
47 #import <CoreMedia/CMSampleBuffer.h>
48 #import <QuartzCore/CALayer.h>
49 #import <objc/runtime.h>
50 #import <wtf/text/AtomicString.h>
51 #import <wtf/text/CString.h>
52 #import <wtf/HashCountedSet.h>
53 #import <wtf/WeakPtr.h>
54 #import <map>
55
#pragma mark -
#pragma mark Soft Linking

// AVFoundation and CoreMedia are loaded lazily at runtime instead of being
// hard-linked; the SOFT_LINK_* macros generate the dlopen/dlsym wrappers.
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)

// Objective-C classes resolved from AVFoundation at runtime.
SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
SOFT_LINK_CLASS(AVFoundation, AVStreamSession)

// Framework string/constant symbols resolved at runtime.
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)

SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)

SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)

// CoreMedia C functions resolved at runtime; signatures mirror the SDK headers.
SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
SOFT_LINK(CoreMedia, CMSampleBufferCreateCopyWithNewTiming, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef originalSBuf, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMSampleBufferRef *sBufCopyOut), (allocator, originalSBuf, numSampleTimingEntries, sampleTimingArray, sBufCopyOut))
SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
SOFT_LINK(CoreMedia, CMSampleBufferGetFormatDescription, CMFormatDescriptionRef, (CMSampleBufferRef sbuf), (sbuf))
SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
SOFT_LINK(CoreMedia,  CMSampleBufferGetSampleTimingInfoArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount timingArrayEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut), (sbuf, timingArrayEntries, timingArrayOut, timingArrayEntriesNeededOut))
SOFT_LINK(CoreMedia, CMSampleBufferGetTotalSampleSize, size_t, (CMSampleBufferRef sbuf), (sbuf))
SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize, (CMVideoFormatDescriptionRef videoDesc, Boolean usePixelAspectRatio, Boolean useCleanAperture), (videoDesc, usePixelAspectRatio, useCleanAperture))

// Route the familiar constant spellings through the generated soft-link
// getters so the rest of the file can use them unmodified.
#define AVMediaTypeVideo getAVMediaTypeVideo()
#define AVMediaTypeAudio getAVMediaTypeAudio()
#define AVMediaTypeText getAVMediaTypeText()
#define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
#define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
#define kCMTimeZero getkCMTimeZero()
#define kCMTimeInvalid getkCMTimeInvalid()
#define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
#define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
#define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
#define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
#define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
#define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()

#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
120
#pragma mark -
#pragma mark AVStreamSession

// Locally declared interface for the soft-linked AVStreamSession class (its
// header is not imported here); only the methods this file uses are declared.
@interface AVStreamSession : NSObject
- (void)addStreamDataParser:(AVStreamDataParser *)streamDataParser;
- (void)removeStreamDataParser:(AVStreamDataParser *)streamDataParser;
@end
128
#pragma mark -
#pragma mark AVStreamDataParser

// Locally declared interface for the soft-linked AVStreamDataParser class.
// The parser consumes raw stream bytes (appendStreamData:) and reports parsed
// assets, media data, and content-key requests through its delegate.
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
- (void)providePendingMediaData;
- (void)processContentKeyResponseData:(NSData *)contentKeyResponseData forTrackID:(CMPersistentTrackID)trackID;
- (void)processContentKeyResponseError:(NSError *)error forTrackID:(CMPersistentTrackID)trackID;
- (void)renewExpiringContentKeyResponseDataForTrackID:(CMPersistentTrackID)trackID;
- (NSData *)streamingContentKeyRequestDataForApp:(NSData *)appIdentifier contentIdentifier:(NSData *)contentIdentifier trackID:(CMPersistentTrackID)trackID options:(NSDictionary *)options error:(NSError **)outError;
@end
143
#pragma mark -
#pragma mark AVSampleBufferDisplayLayer

// Locally declared interface for the soft-linked AVSampleBufferDisplayLayer,
// the CALayer subclass that renders enqueued video CMSampleBuffers.
@interface AVSampleBufferDisplayLayer : CALayer
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
156
#pragma mark -
#pragma mark AVSampleBufferAudioRenderer

// Locally declared interface for the soft-linked AVSampleBufferAudioRenderer,
// which plays enqueued audio CMSampleBuffers. Mirrors the display layer's
// enqueue/flush/readiness API.
@interface AVSampleBufferAudioRenderer : NSObject
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
169
#pragma mark -
#pragma mark WebAVStreamDataParserListener

// Delegate object for AVStreamDataParser. The parent SourceBufferPrivateAVFObjC
// is held as a WeakPtr so parser callbacks that arrive after the parent has
// been destroyed can be dropped safely instead of touching freed memory.
@interface WebAVStreamDataParserListener : NSObject {
    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
    AVStreamDataParser* _parser;
}
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
@end
179
@implementation WebAVStreamDataParserListener

// Designated initializer. Registers self as |parser|'s delegate. The parent is
// stored as a WeakPtr so callbacks that race with the parent's destruction
// become no-ops rather than dereferencing a dead object.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

- (void)dealloc
{
    // Clear the delegate so the parser cannot message a deallocated listener.
    [_parser setDelegate:nil];
    [super dealloc];
}

// Detaches this listener from its parser. Safe to call multiple times.
- (void)invalidate
{
    [_parser setDelegate:nil];
    _parser = nullptr;
}

// The delegate callbacks below are delivered on a background parsing thread.
// Each one retains self and its payload, then hops to the main thread, where
// the weakly held parent is re-checked before being used.

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;

    RetainPtr<AVAsset*> strongAsset = asset;
    callOnMainThread([strongSelf, strongAsset] {
        if (strongSelf->_parent)
            strongSelf->_parent->didParseStreamDataAsAsset(strongAsset.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    UNUSED_PARAM(discontinuity);
    // The discontinuity flag is currently unused; forward to the plain variant
    // instead of duplicating its dispatch logic.
    [self streamDataParser:streamDataParser didParseStreamDataAsAsset:asset];
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;

    RetainPtr<NSError> strongError = error;
    callOnMainThread([strongSelf, strongError] {
        if (strongSelf->_parent)
            strongSelf->_parent->didFailToParseStreamDataWithError(strongError.get());
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(NSUInteger)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;

    RetainPtr<CMSampleBufferRef> strongSample = sample;
    // Copy the media type into a WTF::String so it can safely cross threads.
    String mediaType = nsMediaType;
    callOnMainThread([strongSelf, strongSample, trackID, mediaType, flags] {
        if (strongSelf->_parent)
            strongSelf->_parent->didProvideMediaDataForTrackID(trackID, strongSample.get(), mediaType, flags);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;

    String mediaType = nsMediaType;
    callOnMainThread([strongSelf, trackID, mediaType] {
        if (strongSelf->_parent)
            strongSelf->_parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
    });
}

- (void)streamDataParserWillProvideContentKeyRequestInitializationData:(AVStreamDataParser *)streamDataParser forTrackID:(CMPersistentTrackID)trackID
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);

    if (isMainThread()) {
        // FIX: _parent is a WeakPtr and may already have been cleared by the
        // parent's destruction; check it before dereferencing, as every other
        // delegate callback in this class does.
        if (_parent)
            _parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
        return;
    }

    // We must call synchronously to the main thread, as the AVStreamSession must be associated
    // with the streamDataParser before the delegate method returns.
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
    dispatch_sync(dispatch_get_main_queue(), [strongSelf, trackID]() {
        if (strongSelf->_parent)
            strongSelf->_parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
    });
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;

    RetainPtr<NSData> strongData = initData;
    callOnMainThread([strongSelf, strongData, trackID] {
        if (strongSelf->_parent)
            strongSelf->_parent->didProvideContentKeyRequestInitializationDataForTrackID(strongData.get(), trackID);
    });
}
@end
319
// KVO/notification observer that forwards AVSampleBufferDisplayLayer and
// AVSampleBufferAudioRenderer error states to the owning
// SourceBufferPrivateAVFObjC. -invalidate must be called (or dealloc reached)
// to unregister all observers; _parent is a raw pointer cleared by -invalidate.
@interface WebAVSampleBufferErrorListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
}

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
@end
333
@implementation WebAVSampleBufferErrorListener

- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;
    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Unregisters every KVO and notification observer and severs the parent link.
// Safe to call repeatedly. After this runs, any error callbacks already queued
// to the main thread must become no-ops (see the _parent checks below).
- (void)invalidate
{
    if (!_parent && !_layers.size() && !_renderers.size())
        return;

    for (auto& layer : _layers) {
        [layer removeObserver:self forKeyPath:@"error"];
        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    }
    _layers.clear();

    for (auto& renderer : _renderers)
        [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.clear();

    [[NSNotificationCenter defaultCenter] removeObserver:self];

    _parent = nullptr;
}

// Starts KVO on the layer's error and HDCP-obscured state, plus the
// failed-to-decode notification.
- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(!_layers.contains(layer));

    _layers.append(layer);
    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
{
    ASSERT(_parent);
    ASSERT(_layers.contains(layer));

    [layer removeObserver:self forKeyPath:@"error"];
    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
    _layers.remove(_layers.find(layer));

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
}

- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(!_renderers.contains(renderer));

    _renderers.append(renderer);
    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
}

- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
{
    ASSERT(_parent);
    ASSERT(_renderers.contains(renderer));

    [renderer removeObserver:self forKeyPath:@"error"];
    _renderers.remove(_renderers.find(renderer));
}

// KVO callback (may arrive on a background thread); forwards the error to the
// parent on the main thread.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    RetainPtr<WebAVSampleBufferErrorListener> strongSelf = self;
    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(_layers.contains(layer.get()));

        if ([keyPath isEqualTo:@"error"]) {
            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([strongSelf, layer, error] {
                // FIX: -invalidate can null _parent before this queued callback
                // runs; bail out instead of calling through a null pointer.
                if (!strongSelf->_parent)
                    return;
                strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
            });
        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
            if ([[change valueForKey:NSKeyValueChangeNewKey] boolValue]) {
                // Synthesize an HDCP error so the obscured state surfaces
                // through the same error path as decode failures.
                RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
                callOnMainThread([strongSelf, layer, error] {
                    // FIX: guard against _parent being cleared by -invalidate.
                    if (!strongSelf->_parent)
                        return;
                    strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
                });
            }
        } else
            ASSERT_NOT_REACHED();

    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];

        ASSERT(_renderers.contains(renderer.get()));
        ASSERT([keyPath isEqualTo:@"error"]);

        callOnMainThread([strongSelf, renderer, error] {
            // FIX: guard against _parent being cleared by -invalidate.
            if (!strongSelf->_parent)
                return;
            strongSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
        });
    } else
        ASSERT_NOT_REACHED();
}

// Notification handler for AVSampleBufferDisplayLayerFailedToDecodeNotification.
- (void)layerFailedToDecode:(NSNotification*)note
{
    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
    ASSERT(_layers.contains(layer.get()));

    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];

    RetainPtr<WebAVSampleBufferErrorListener> strongSelf = self;
    callOnMainThread([strongSelf, layer, error] {
        // FIX: guard against _parent being cleared by -invalidate.
        if (!strongSelf->_parent)
            return;
        strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
    });
}
@end
465
466 namespace WebCore {
467
468 #pragma mark -
469 #pragma mark MediaSampleAVFObjC
470
// MediaSample implementation backed by a CoreMedia CMSampleBuffer. The track
// id is stored as its decimal string form, matching what trackID() returns.
class MediaSampleAVFObjC final : public MediaSample {
public:
    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
    virtual ~MediaSampleAVFObjC() { }

private:
    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
        : m_sample(sample)
        , m_id(String::format("%d", trackID))
    {
    }

    // Timing accessors delegate directly to the soft-linked CoreMedia getters.
    virtual MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
    virtual MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
    virtual MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
    virtual AtomicString trackID() const override { return m_id; }
    virtual size_t sizeInBytes() const override { return CMSampleBufferGetTotalSampleSize(m_sample.get()); }
    virtual FloatSize presentationSize() const override;

    virtual SampleFlags flags() const override;
    virtual PlatformSample platformSample() override;
    virtual void dump(PrintStream&) const override;
    virtual void offsetTimestampsBy(const MediaTime&) override;

    // Retained sample buffer; replaced wholesale by offsetTimestampsBy().
    RetainPtr<CMSampleBufferRef> m_sample;
    // Decimal string form of the track id.
    AtomicString m_id;
};
498
499 PlatformSample MediaSampleAVFObjC::platformSample()
500 {
501     PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
502     return sample;
503 }
504
505 static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
506 {
507     CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
508     if (!attachments)
509         return true;
510
511     for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
512         CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
513         if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
514             return false;
515     }
516     return true;
517 }
518
519 MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
520 {
521     int returnValue = MediaSample::None;
522
523     if (CMSampleBufferIsRandomAccess(m_sample.get()))
524         returnValue |= MediaSample::IsSync;
525
526     return SampleFlags(returnValue);
527 }
528
529 FloatSize MediaSampleAVFObjC::presentationSize() const
530 {
531     CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(m_sample.get());
532     if (CMFormatDescriptionGetMediaType(formatDescription) != kCMMediaType_Video)
533         return FloatSize();
534
535     return FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true)); 
536 }
537
538 void MediaSampleAVFObjC::dump(PrintStream& out) const
539 {
540     out.print("{PTS(", presentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize(), ")}");
541 }
542
543 void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
544 {
545     CMItemCount itemCount = 0;
546     if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
547         return;
548
549     Vector<CMSampleTimingInfo> timingInfoArray;
550     timingInfoArray.grow(itemCount);
551     if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
552         return;
553
554     for (auto& timing : timingInfoArray) {
555         timing.presentationTimeStamp = toCMTime(toMediaTime(timing.presentationTimeStamp) + offset);
556         timing.decodeTimeStamp = toCMTime(toMediaTime(timing.decodeTimeStamp) + offset);
557     }
558
559     CMSampleBufferRef newSample;
560     if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
561         return;
562
563     m_sample = adoptCF(newSample);
564 }
565
566 #pragma mark -
567 #pragma mark MediaDescriptionAVFObjC
568
// MediaDescription backed by an AVAssetTrack: caches the track's kind
// (video/audio/text) and the four-character codec code of its first format
// description at construction time.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    virtual AtomicString codec() const override { return m_codec; }
    virtual bool isVideo() const override { return m_isVideo; }
    virtual bool isAudio() const override { return m_isAudio; }
    virtual bool isText() const override { return m_isText; }

protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        // Only the first format description is consulted for the codec string.
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            // NOTE(review): reinterpreting the FourCharCode's in-memory bytes
            // assumes they match display order; on a little-endian host this
            // would yield the reversed code — TODO confirm intended behavior.
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
598
599 #pragma mark -
600 #pragma mark SourceBufferPrivateAVFObjC
601
// Factory: allocates a SourceBufferPrivateAVFObjC and hands ownership of the
// initial reference to the caller via adoptRef.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    return adoptRef(new SourceBufferPrivateAVFObjC(parent));
}
606
// Creates the soft-linked AVStreamDataParser and wires up both listener
// objects. The parser listener receives only a WeakPtr back to |this| (via
// m_weakFactory/createWeakPtr), so parser callbacks delivered after this
// object's destruction are dropped by the listener. Track ids of -1 mean
// "no enabled/protected track yet".
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_weakFactory(this)
    , m_parser(adoptNS([allocAVStreamDataParserInstance() init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
    , m_mediaSource(parent)
    , m_client(0)
    , m_parsingSucceeded(true)
    , m_enabledVideoTrackID(-1)
    , m_protectedTrackID(-1)
{
}
619
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    // The client is expected to have been detached before destruction.
    ASSERT(!m_client);
    // Tear down the parser delegation and renderer/layer observation
    // (destroyParser/destroyRenderers are defined elsewhere in this file).
    destroyParser();
    destroyRenderers();
}
626
// Main-thread handler for a parsed initialization segment. Invoked via the
// WebAVStreamDataParserListener only while this object is still alive (the
// listener holds a WeakPtr). Rebuilds the video/audio track lists from the
// parsed asset and forwards an InitializationSegment to the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    m_asset = asset;

    // A new initialization segment replaces any previously discovered tracks.
    m_videoTracks.clear();
    m_audioTracks.clear();

    SourceBufferPrivateClient::InitializationSegment segment;
    segment.duration = toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    // Notify the player that its media characteristics may have changed.
    if (m_mediaSource)
        m_mediaSource->player()->characteristicsChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
}
665
// Records a parse failure; the flag is inspected later by the append path.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
675
// Aggregate used to pass coded-frame context through C-style callbacks.
// NOTE(review): mediaType is a reference member, so an instance must not
// outlive the String it refers to; no use of this struct is visible in this
// portion of the file — confirm it is still needed.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
681
// Main-thread handler for a parsed media sample; delegates to
// processCodedFrame. The parser-provided flags are currently unused.
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
{
    UNUSED_PARAM(flags);

    processCodedFrame(trackID, sampleBuffer, mediaType);
}
688
// Hands one parsed sample to the client, detecting presentation-size changes
// on the enabled video track along the way. Always returns true.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (trackID == m_enabledVideoTrackID) {
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
        if (formatSize != m_cachedSize) {
            // FIX: the format string has three conversions (%p, %lf, %lf) but only
            // two arguments were passed; "this" was missing, so %p consumed the
            // width and the log printed garbage. Pass "this" explicitly.
            LOG(MediaSource, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - size change detected: {width=%lf, height=%lf}", this, formatSize.width(), formatSize.height());
            m_cachedSize = formatSize;
            if (m_mediaSource)
                m_mediaSource->player()->sizeChanged();
        }
    }

    if (m_client) {
        // Wrap the CMSampleBuffer in a MediaSample and deliver it to the
        // SourceBuffer machinery on the client side.
        RefPtr<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());
        m_client->sourceBufferPrivateDidReceiveSample(this, mediaSample.release());
    }

    return true;
}
711
// Parser-delegate callback for end-of-track. Intentionally a stub for now.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
{
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(mediaType);
    notImplemented();
}
718
// Parser-delegate callback fired just before encrypted-media init data arrives.
// Registers the parser with the player's stream session (EME v2 builds only).
void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(int trackID)
{
    // Without a media source there is no player/stream session to attach to.
    if (!m_mediaSource)
        return;

    ASSERT(m_parser);

#if ENABLE(ENCRYPTED_MEDIA_V2)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    // Remember which track is protected; used later for key handling.
    m_protectedTrackID = trackID;

    // The -addStreamDataParser: call can raise ObjC exceptions; keep them from
    // propagating into C++ frames.
    BEGIN_BLOCK_OBJC_EXCEPTIONS;
    [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
    END_BLOCK_OBJC_EXCEPTIONS;
#else
    UNUSED_PARAM(trackID);
#endif
}
737
// Parser-delegate callback delivering encrypted-media initialization data.
// Copies the NSData into a Uint8Array and forwards it as a key-needed event.
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID)
{
    if (!m_mediaSource)
        return;

    // trackID is only consumed in EME v2 builds below.
    UNUSED_PARAM(trackID);
#if ENABLE(ENCRYPTED_MEDIA_V2)
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    m_protectedTrackID = trackID;
    // Copy the raw init-data bytes into a JS-visible Uint8Array.
    // (Messaging a nil initData yields length 0, producing an empty array.)
    RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
    [initData getBytes:initDataArray->data() length:initDataArray->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
#else
    UNUSED_PARAM(initData);
#endif
}
754
// Sets the (unowned) client that receives parse/sample/ready notifications.
// Passing null detaches the current client.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
759
// Returns the process-wide concurrent dispatch queue used for stream parsing,
// creating it exactly once in a thread-safe manner.
static dispatch_queue_t globalDataParserQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t parserQueue;
    dispatch_once(&creationToken, ^{
        parserQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
    });
    return parserQueue;
}
769
// Appends raw media-segment bytes. The data is copied into an NSData and parsed
// asynchronously on the shared parser queue; appendCompleted() is then invoked
// back on the main thread.
void SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
{
    // FIX: "length" is unsigned; it was formatted with %d. Use %u.
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%u", this, data, length);

    RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data length:length]);
    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
    // Capture strong references to the parser and its listener so they stay
    // alive for the duration of the async parse even if abort()/destroyParser()
    // replaces or clears m_parser/m_delegate in the meantime.
    RetainPtr<AVStreamDataParser> parser = m_parser;
    RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;

    m_parsingSucceeded = true;

    dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate] {

        [parser appendStreamData:nsData.get()];

        // Completion must run on the main thread; the weak pointer guards
        // against this buffer having been destroyed in the interim.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->appendCompleted();
        });
    });
}
791
// Invoked on the main thread after the parser has consumed an appended buffer.
// Reports loading progress to the player and the final result to the client.
void SourceBufferPrivateAVFObjC::appendCompleted()
{
    SourceBufferPrivateClient::AppendResult result = m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed;

    if (m_parsingSucceeded && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    if (m_client)
        m_client->sourceBufferPrivateAppendComplete(this, result);
}
800
// Aborts the current append sequence by discarding the parser entirely.
void SourceBufferPrivateAVFObjC::abort()
{
    // The parser does not have a mechanism for resetting to a clean state, so destroy and re-create it.
    // FIXME(135164): Support resetting parser to the last appended initialization segment.
    destroyParser();

    // Fresh parser plus a fresh listener bound to it; the listener holds only a
    // weak back-reference to this buffer.
    m_parser = adoptNS([allocAVStreamDataParserInstance() init]);
    m_delegate = adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]);
}
810
// Detaches and releases the stream parser and its delegate listener.
// Order matters: unregister from the stream session while the parser is still
// alive, then invalidate the listener so late callbacks are dropped.
void SourceBufferPrivateAVFObjC::destroyParser()
{
    if (m_mediaSource && m_mediaSource->player()->hasStreamSession())
        [m_mediaSource->player()->streamSession() removeStreamDataParser:m_parser.get()];

    [m_delegate invalidate];
    m_delegate = nullptr;
    m_parser = nullptr;
}
820
// Tears down the video display layer and all audio renderers: detach from the
// player, flush pending samples, stop media-data callbacks, and stop error
// observation before releasing each object.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer) {
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        [m_errorListener stopObservingLayer:m_displayLayer.get()];
        m_displayLayer = nullptr;
    }

    // Range-based for, consistent with the iteration style used elsewhere in
    // this file (e.g. the error-client loops).
    for (auto& trackIDAndRenderer : m_audioRenderers) {
        AVSampleBufferAudioRenderer* renderer = trackIDAndRenderer.second.get();
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer);
        [renderer flush];
        [renderer stopRequestingMediaData];
        [m_errorListener stopObservingRenderer:renderer];
    }

    m_audioRenderers.clear();
}
843
// Called when this buffer is removed from its media source: tear down parsing
// and rendering machinery first, then detach from the source itself.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyParser();
    destroyRenderers();

    if (m_mediaSource)
        m_mediaSource->removeSourceBuffer(this);
}
852
// Returns the owning player's ready state, or HaveNothing once detached.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;

    return m_mediaSource->player()->readyState();
}
857
// Propagates a ready-state change to the owning player, if still attached.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->player()->setReadyState(readyState);
}
863
// True when the client reports that this buffer contains video; false with no client.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    return m_client && m_client->sourceBufferPrivateHasVideo(this);
}
871
// True when the client reports that this buffer contains audio; false with no client.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    return m_client && m_client->sourceBufferPrivateHasAudio(this);
}
879
// Responds to a video track being selected or deselected: toggles parser
// delivery for the track and attaches/detaches the sample display layer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // Deselecting the currently enabled track: stop delivery and drop the
        // layer from the player. Note the layer itself is kept for reuse.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        if (!m_displayLayer) {
            m_displayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
            // NOTE(review): this block captures the C++ "this" implicitly via the
            // member call; it is cancelled by -stopRequestingMediaData in
            // destroyRenderers()/didBecomeReadyForMoreSamples — confirm teardown
            // always precedes destruction of this object.
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            [m_errorListener beginObservingLayer:m_displayLayer.get()];
        }
        if (m_mediaSource)
            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
    }
}
902
// Responds to an audio track being enabled or disabled: toggles parser delivery
// and lazily creates / attaches the per-track AVSampleBufferAudioRenderer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        // NOTE(review): map operator[] default-inserts a null RetainPtr when the
        // track was never enabled; messaging the resulting nil renderer is a
        // no-op, but it leaves a spurious entry in m_audioRenderers — confirm
        // whether a count() guard is wanted here.
        AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer);
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        if (!m_audioRenderers.count(trackID)) {
            // First enable for this track: create the renderer and start
            // observing it for errors and media-data requests.
            renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers[trackID] = renderer;
            [m_errorListener beginObservingRenderer:renderer.get()];
        } else
            renderer = m_audioRenderers[trackID].get();

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
929
// Registers a client for layer/renderer error callbacks. Each client may be
// registered at most once (asserted in debug builds).
void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(!m_errorClients.contains(client));
    m_errorClients.append(client);
}
935
// Unregisters a previously registered error client (asserted in debug builds).
void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
{
    ASSERT(m_errorClients.contains(client));
    m_errorClients.remove(m_errorClients.find(client));
}
941
// Called when the display layer reports a rendering error: fan the error out to
// registered error clients, then surface an OSStatus-derived code to the client.
void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);
    for (auto& client : m_errorClients)
        client->layerDidReceiveError(layer, error);

    // Extract the underlying OSStatus from the error's userInfo; nil messaging
    // yields 0 if the key is absent.
    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveRenderingError(this, errorCode);
}
953
// Called when an audio renderer reports an error: forward it to every
// registered error client. Unlike layer errors, no code is sent to m_client.
void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
{
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);
    for (auto& client : m_errorClients)
        client->rendererDidReceiveError(renderer, error);
}
960
// Returns a copy of sampleBuffer with every sample attachment marked
// kCMSampleAttachmentKey_DoNotDisplay, so it can be enqueued (to prime a
// decoder) without being rendered. Falls back to the original buffer
// (retained, not adopted) if the copy fails.
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
    if (!newSampleBuffer)
        return sampleBuffer;

    // FIX: guard against a null attachments array — even with
    // createIfNecessary == true the call can fail, and CFArrayGetCount(null)
    // would crash.
    if (CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true)) {
        for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
        }
    }

    return adoptCF(newSampleBuffer);
}
976
// Dispatches a batch of non-displaying samples to the renderer that owns the
// given track: the display layer for the enabled video track, or the matching
// audio renderer. Unknown track IDs are ignored.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    // FIX: mediaSamples.size() is size_t but was formatted with %d; on LP64 the
    // width mismatch also corrupted the trackID argument. Use %zu.
    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %zu samples, trackId: %d", this, mediaSamples.size(), trackID);

    if (trackID == m_enabledVideoTrackID)
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
    else if (m_audioRenderers.count(trackID))
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
}
987
// Flushes the audio renderer, then enqueues non-displaying copies of each
// sample (used to prime the decoder after a seek without audible output).
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
{
    [renderer flush];

    // Range-based for, consistent with the iteration style used elsewhere in
    // this file.
    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);

        [renderer enqueueSampleBuffer:sampleBuffer.get()];
    }
}
1003
// Flushes the display layer, enqueues non-displaying copies of each sample,
// and tells the player no displayable frame is available until real samples
// arrive again.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
{
    [layer flush];

    // Range-based for, consistent with the iteration style used elsewhere in
    // this file.
    for (auto& mediaSample : mediaSamples) {
        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());

        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);

        [layer enqueueSampleBuffer:sampleBuffer.get()];
    }

    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
}
1024
// Enqueues a displayable sample on the renderer that owns the given track.
// Samples for tracks with no renderer, or non-CMSampleBuffer samples, are dropped.
void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    // Ignore tracks we are not rendering.
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
        return;

    RefPtr<MediaSample> mediaSample = prpMediaSample;

    PlatformSample platformSample = mediaSample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());

    if (trackID == m_enabledVideoTrackID) {
        [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        // A real (displayable) video sample has been queued; let the player know.
        if (m_mediaSource)
            m_mediaSource->player()->setHasAvailableVideoFrame(true);
    } else
        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
}
1046
// Returns whether the renderer that owns the given track can accept more
// sample data. Unknown track IDs are a programming error and report false.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID)
        return [m_displayLayer isReadyForMoreMediaData];

    if (m_audioRenderers.count(trackID))
        return [m_audioRenderers[trackID] isReadyForMoreMediaData];

    ASSERT_NOT_REACHED();
    return false;
}
1059
// Notifies the media source of a change in this buffer's active state.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;

    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
1065
// Asks the client for the nearest buffered seek target within the given
// thresholds; with no client, the requested time is returned unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    if (!m_client)
        return time;

    return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
}
1072
// Forwards a seek request to the client, if one is attached.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;

    m_client->sourceBufferPrivateSeekToTime(this, time);
}
1078
// Returns the last video presentation size observed by processCodedFrame(),
// rounded to integer pixels.
IntSize SourceBufferPrivateAVFObjC::naturalSize()
{
    return roundedIntSize(m_cachedSize);
}
1083
// Invoked (on the main queue) when a renderer signals it can accept more data:
// stop the renderer's request callback, then ask the client to push samples.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer stopRequestingMediaData];
    else if (m_audioRenderers.count(trackID))
        [m_audioRenderers[trackID] stopRequestingMediaData];
    else {
        // No renderer owns this track; nothing to notify.
        ASSERT_NOT_REACHED();
        return;
    }

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
}
1098
// Re-arms the ready-for-more-data callback on the renderer that owns the given
// track, so didBecomeReadyForMoreSamples() fires once it can take more input.
// NOTE(review): these blocks capture the C++ "this" implicitly; they are
// cancelled by -stopRequestingMediaData — confirm teardown always precedes
// destruction of this object.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else if (m_audioRenderers.count(trackID)) {
        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else
        ASSERT_NOT_REACHED();
}
1113
1114 }
1115
1116 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)