[MSE][Mac] Add a new MSE-compatible MediaPlayerPrivate implementation, MediaPlayerPri...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "ExceptionCodePlaceholder.h"
32 #import "Logging.h"
33 #import "MediaDescription.h"
34 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35 #import "MediaSample.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaTimeMac.h"
38 #import "NotImplemented.h"
39 #import "SoftLinking.h"
40 #import "SourceBufferPrivateClient.h"
41 #import "TimeRanges.h"
42 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
43 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import <AVFoundation/AVAssetTrack.h>
46 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
47 #import <objc/runtime.h>
48 #import <wtf/text/AtomicString.h>
49 #import <wtf/text/CString.h>
50 #import <wtf/HashCountedSet.h>
51 #import <wtf/WeakPtr.h>
52 #import <map>
53
54 #pragma mark -
55 #pragma mark Soft Linking
56
57 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
58 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
59
60 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
61 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
62 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
63
64 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
65 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
66 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
67
68 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
69 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
70 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
71 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
72 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
73 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
74 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
75 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
76
77 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
80
81 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
82 SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
83 SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
84 SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
85 SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
86 SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
87 SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
88 SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
89 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
90 SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
91
92 #define AVMediaTypeVideo getAVMediaTypeVideo()
93 #define AVMediaTypeAudio getAVMediaTypeAudio()
94 #define AVMediaTypeText getAVMediaTypeText()
95 #define kCMTimeZero getkCMTimeZero()
96 #define kCMTimeInvalid getkCMTimeInvalid()
97 #define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
98 #define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
99 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
100 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
101 #define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
102 #define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
103
104 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
105 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
106 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
107
108 #pragma mark -
109 #pragma mark AVStreamDataParser
@class AVStreamDataParserInternal;

// Local declaration of AVStreamDataParser so this file compiles against SDKs
// that do not yet ship a header for it; the concrete class is soft-linked
// above via SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser).
// NOTE(review): availability is marked TBD — confirm the minimum OS version.
NS_CLASS_AVAILABLE(TBD, TBD)
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
@end
119
120 #pragma mark -
121 #pragma mark WebAVStreamDataParserListener
122
// Objective-C adapter that receives AVStreamDataParser delegate callbacks and
// forwards them to the owning C++ SourceBufferPrivateAVFObjC.
@interface WebAVStreamDataParserListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent; // Not owned; the parent owns this listener.
    AVStreamDataParser* _parser; // Not retained; the parent keeps the parser alive.
}
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
@end
129
@implementation WebAVStreamDataParserListener
// Designated initializer. Registers self as |parser|'s delegate. Neither
// |parser| nor |parent| is retained (this file is manual-retain/release);
// the owning SourceBufferPrivateAVFObjC keeps both alive for our lifetime.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

- (void)dealloc
{
    // Unhook the delegate so the parser cannot message a deallocated listener.
    [_parser setDelegate:nil];
    [super dealloc];
}

// The delegate callbacks below simply forward to the C++ parent.
// NOTE(review): _parent is a raw pointer; these assume callbacks can only be
// delivered while the parent is alive — confirm the parser cannot call back
// after the owner is destroyed or from a non-main queue.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didFailToParseStreamDataWithError(error);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)mediaData forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType flags:(NSUInteger)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didProvideMediaDataForTrackID(trackID, mediaData, mediaType, flags);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
}
@end
186
187 namespace WebCore {
188
189 #pragma mark -
190 #pragma mark MediaSampleAVFObjC
191
// MediaSample implementation backed by a retained CMSampleBufferRef.
// Timing values are read from the buffer on each accessor call rather than
// cached at construction.
class MediaSampleAVFObjC FINAL : public MediaSample {
public:
    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
    virtual ~MediaSampleAVFObjC() { }

    virtual MediaTime presentationTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
    virtual MediaTime decodeTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
    virtual MediaTime duration() const OVERRIDE { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
    virtual AtomicString trackID() const OVERRIDE { return m_id; }

    virtual SampleFlags flags() const OVERRIDE;
    virtual PlatformSample platformSample() OVERRIDE;

protected:
    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
        : m_sample(sample) // RetainPtr retains; the buffer outlives the parser callback.
        , m_id(String::format("%d", trackID)) // MediaSample exposes track IDs as strings.
    {
    }

    RetainPtr<CMSampleBufferRef> m_sample;
    AtomicString m_id;
};
215
// Wraps the retained CMSampleBufferRef in the engine-neutral PlatformSample
// union. The buffer is not re-retained; the caller must use it while this
// MediaSampleAVFObjC is alive.
PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample platformSample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return platformSample;
}
221
// Returns true if the buffer contains at least one sync (random-access) sample.
// A sample is sync unless it carries kCMSampleAttachmentKey_NotSync.
static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    // With createIfNecessary == false, CoreMedia returns NULL when no sample
    // carries any attachment; absence of NotSync means every sample is sync.
    // The old code passed NULL to CFArrayGetCount() (a crash) and would have
    // reported such buffers as non-sync.
    if (!attachments)
        return true;

    for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
        if (!CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
            return true;
    }
    return false;
}
232
// Derives MediaSample flags from the underlying buffer; currently only the
// sync-sample (random access) bit is reported.
MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    if (CMSampleBufferIsRandomAccess(m_sample.get()))
        return SampleFlags(MediaSample::None | MediaSample::IsSync);
    return MediaSample::None;
}
242
243 #pragma mark -
244 #pragma mark MediaDescriptionAVFObjC
245
// MediaDescription backed by an AVAssetTrack. Kind flags and the codec
// four-character code are captured once at construction.
class MediaDescriptionAVFObjC FINAL : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    virtual AtomicString codec() const OVERRIDE { return m_codec; }
    virtual bool isVideo() const OVERRIDE { return m_isVideo; }
    virtual bool isAudio() const OVERRIDE { return m_isAudio; }
    virtual bool isText() const OVERRIDE { return m_isText; }
    
protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        // Codec comes from the first format description, if any.
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // NOTE(review): this reads the FourCharCode's in-memory bytes
            // directly; on a little-endian host the resulting string may be
            // byte-reversed relative to the canonical 'avc1'-style spelling —
            // verify against consumers of codec().
            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
275
276 #pragma mark -
277 #pragma mark SourceBufferPrivateAVFObjC
278
// Factory for SourceBufferPrivateAVFObjC; adoptRef() takes ownership of the
// initial reference.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    RefPtr<SourceBufferPrivateAVFObjC> sourceBuffer = adoptRef(new SourceBufferPrivateAVFObjC(parent));
    return sourceBuffer;
}
283
// Constructor: creates the soft-linked stream parser first, then the listener,
// which wires itself up as the parser's delegate (initialization order matters).
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:this]))
    , m_parent(parent) // Not owned; the media source outlives its source buffers.
    , m_client(0)
    , m_parsingSucceeded(true)
{
}
292
// Destructor: tears down the display layer — detach it from the player, drop
// the last rendered image, and stop its ready-for-more-data callbacks (whose
// block captures |this|).
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    if (m_displayLayer) {
        m_parent->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flushAndRemoveImage];
        [m_displayLayer stopRequestingMediaData];
        m_displayLayer = nullptr;
    }
}
302
// Parser callback for a completed initialization segment: builds an
// InitializationSegment describing the asset's video and audio tracks and
// hands it to the client, if one is attached.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    m_asset = asset;

    SourceBufferPrivateClient::InitializationSegment segment;
    segment.duration = toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation videoInfo;
            videoInfo.track = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            videoInfo.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(videoInfo);
            continue;
        }
        if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation audioInfo;
            audioInfo.track = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            audioInfo.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(audioInfo);
        }

        // FIXME(125161): Add TextTrack support
    }

    if (!m_client)
        return;
    m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
}
331
// Parser error callback: records the failure so the in-flight append() call
// reports ParsingFailed when it returns.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(Media, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
341
// Context threaded through CMSampleBufferCallForEachSample's void* refcon.
// The reference member is safe because the struct only lives on the stack for
// the duration of the enumeration call.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
347
// C trampoline for CMSampleBufferCallForEachSample: recovers the
// ProcessCodedFrameInfo and dispatches to the member function.
// NOTE(review): processCodedFrame() returns true (nonzero), which
// CMSampleBufferCallForEachSample treats as a failure status and stops
// iterating after the first sample — verify parser buffers carry one sample.
static OSStatus callProcessCodedFrameForEachSample(CMSampleBufferRef sampleBuffer, CMItemCount, void *refcon)
{
    ProcessCodedFrameInfo* frameInfo = static_cast<ProcessCodedFrameInfo*>(refcon);
    return frameInfo->sourceBuffer->processCodedFrame(frameInfo->trackID, sampleBuffer, frameInfo->mediaType);
}
353
// Parser media-data callback: visits every coded frame in the buffer,
// forwarding each one to processCodedFrame().
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
{
    UNUSED_PARAM(flags);

    ProcessCodedFrameInfo frameInfo = { this, trackID, mediaType };
    CMSampleBufferCallForEachSample(sampleBuffer, &callProcessCodedFrameForEachSample, &frameInfo);
}
361
// Wraps one coded frame in a MediaSampleAVFObjC and delivers it to the client.
// The media-type argument is currently unused.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (m_client) {
        RefPtr<MediaSampleAVFObjC> sample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
        m_client->sourceBufferPrivateDidReceiveSample(this, sample);
    }

    return true;
}
369
// Parser end-of-track callback; intentionally unimplemented for now.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
{
    UNUSED_PARAM(mediaType);
    UNUSED_PARAM(trackID);
    notImplemented();
}
376
// Registers the SourceBuffer-side client; not owned. Pass 0 to detach.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
381
// Pushes |length| bytes into the stream parser. Initialization segments and
// coded frames come back via the delegate callbacks above.
// NOTE(review): assumes parse failures are reported synchronously from within
// -appendStreamData: so that m_parsingSucceeded is up to date on return — verify.
SourceBufferPrivate::AppendResult SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
{
    // Cleared to false by didFailToParseStreamDataWithError() during the append.
    m_parsingSucceeded = true;

    LOG(Media, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
    [m_parser appendStreamData:[NSData dataWithBytes:data length:length]];

    if (m_parsingSucceeded)
        m_parent->player()->setLoadingProgresssed(true);

    return m_parsingSucceeded ? AppendSucceeded : ParsingFailed;
}
394
// SourceBufferPrivate::abort(); parser-reset semantics not yet implemented.
void SourceBufferPrivateAVFObjC::abort()
{
    notImplemented();
}
399
// Detaches this buffer from its media source. Unlike the destructor this uses
// flush (not flushAndRemoveImage), so the last displayed frame stays on screen.
// The layer must be detached from the player before removeSourceBuffer().
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    if (m_displayLayer) {
        m_parent->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        m_displayLayer = nullptr;
    }

    m_parent->removeSourceBuffer(this);
}
411
// Ready state is owned by the player; this simply forwards the query.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    return m_parent->player()->readyState();
}
416
// Forwards a ready-state change up to the owning player.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    m_parent->player()->setReadyState(readyState);
}
421
// Coded-frame eviction is not yet implemented for this backend.
void SourceBufferPrivateAVFObjC::evictCodedFrames()
{
    notImplemented();
}
426
// Buffer-full tracking is not yet implemented; conservatively report not full
// so appends are never rejected.
bool SourceBufferPrivateAVFObjC::isFull()
{
    notImplemented();
    return false;
}
432
433
// True when an attached client reports buffered video; without a client we
// have no track information and answer false.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    return m_client && m_client->sourceBufferPrivateHasVideo(this);
}
441
// True when an attached client reports buffered audio; without a client we
// have no track information and answer false.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    return m_client && m_client->sourceBufferPrivateHasAudio(this);
}
449
// Reacts to a video track being selected or deselected: toggles parser media
// delivery for the track and lazily creates the display layer on first select.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        if (!m_displayLayer) {
            // adoptNS() balances the +1 reference from alloc/init; assigning
            // the raw pointer would retain again and leak the layer.
            m_displayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                // NOTE(review): the block captures |this| unretained; the
                // destructor calls stopRequestingMediaData, but confirm no
                // callback can race teardown.
                if (m_client)
                    m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this);
            }];
            m_parent->player()->addDisplayLayer(m_displayLayer.get());
        }
    }
}
469
// Audio track enable/disable needs no parser or layer wiring yet.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC*)
{
    // No-op.
}
474
// Returns a copy of |sampleBuffer| with every sample marked
// kCMSampleAttachmentKey_DoNotDisplay, so the decoder can be primed without
// putting frames on screen. If the copy fails, the original (displayable)
// buffer is returned retained as a best-effort fallback.
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
    if (!newSampleBuffer)
        return sampleBuffer;

    // createIfNecessary == true should always yield an array, but guard the
    // NULL case: the old code would have passed NULL to CFArrayGetCount().
    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    if (attachmentsArray) {
        // Count is loop-invariant; hoist it out of the condition.
        for (CFIndex i = 0, count = CFArrayGetCount(attachmentsArray); i < count; ++i) {
            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
        }
    }

    return adoptCF(newSampleBuffer);
}
490
// Flushes the display layer and re-enqueues |mediaSamples| as non-displaying
// frames, priming the decoder (e.g. after a seek) without flashing stale video.
// Only the currently-enabled video track feeds the layer.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackID)
{
    if (trackID.toInt() != m_enabledVideoTrackID)
        return;

    // Vector::size() is size_t; "%lu" with an explicit cast replaces the old
    // "%d", which is undefined behavior for a 64-bit size_t on LP64 targets.
    LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %lu samples, trackId: %d", this, static_cast<unsigned long>(mediaSamples.size()), trackID.toInt());

    [m_displayLayer flush];

    for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
        RefPtr<MediaSample>& mediaSample = *it;

        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);

        [m_displayLayer enqueueSampleBuffer:sampleBuffer.get()];
    }

    // Everything enqueued above is marked do-not-display, so no frame is
    // available yet.
    m_parent->player()->setHasAvailableVideoFrame(false);
}
513
// Enqueues one displayable sample on the layer. Ignores samples for tracks
// other than the enabled video track and non-CMSampleBuffer-backed samples.
void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackID)
{
    if (trackID.toInt() != m_enabledVideoTrackID)
        return;

    RefPtr<MediaSample> mediaSample = prpMediaSample;
    PlatformSample platformSample = mediaSample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
    // A displayable frame has been handed to the layer.
    m_parent->player()->setHasAvailableVideoFrame(true);
}
528
// Delegates to the display layer; before a layer exists, messaging nil
// returns NO, i.e. not ready.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples()
{
    return [m_displayLayer isReadyForMoreMediaData];
}
533
// Propagates this buffer's active state to the owning media source.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    m_parent->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
538
// Asks the client for the nearest seekable time within the given thresholds.
// Without a client there is no buffered-range data, so seek to the exact time.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    if (!m_client)
        return time;
    return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
}
545
// Forwards the seek request to the client; a detached buffer has nothing to do.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;
    m_client->sourceBufferPrivateSeekToTime(this, time);
}
551
552 }
553
554 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)