[MSE][Mac] Adopt new AVStreamDataParser delegate API
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "ExceptionCodePlaceholder.h"
32 #import "Logging.h"
33 #import "MediaDescription.h"
34 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35 #import "MediaSample.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaTimeMac.h"
38 #import "NotImplemented.h"
39 #import "SoftLinking.h"
40 #import "SourceBufferPrivateClient.h"
41 #import "TimeRanges.h"
42 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
43 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import <AVFoundation/AVFoundation.h>
46 #import <objc/runtime.h>
47 #import <wtf/text/AtomicString.h>
48 #import <wtf/text/CString.h>
49 #import <wtf/HashCountedSet.h>
50 #import <wtf/WeakPtr.h>
51 #import <map>
52
53 #pragma mark -
54 #pragma mark Soft Linking
55
56 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
57 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
58
59 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
60 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
61 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
62 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
63
64 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
65 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
66 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
67
68 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
69 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
70 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
71 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
72 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
73 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
74 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
75 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
76
77 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
80
81 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
82 SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
83 SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
84 SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
85 SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
86 SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
87 SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
88 SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
89 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
90 SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
91
92 #define AVMediaTypeVideo getAVMediaTypeVideo()
93 #define AVMediaTypeAudio getAVMediaTypeAudio()
94 #define AVMediaTypeText getAVMediaTypeText()
95 #define kCMTimeZero getkCMTimeZero()
96 #define kCMTimeInvalid getkCMTimeInvalid()
97 #define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
98 #define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
99 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
100 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
101 #define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
102 #define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
103
104 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
105 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
106 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
107
108 #pragma mark -
109 #pragma mark AVStreamDataParser
110
// Local declaration of the AVStreamDataParser SPI so this file compiles
// against the soft-linked class without the private AVFoundation header.
// Only the messages actually sent from this file are declared.
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
@end
117
118 #pragma mark -
119 #pragma mark AVSampleBufferAudioRenderer
120
// Local declaration of the AVSampleBufferAudioRenderer SPI for SDKs whose
// headers do not yet expose it (iOS, and OS X up to 10.9).
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
@interface AVSampleBufferAudioRenderer : NSObject
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
#endif
132
133 #pragma mark -
134 #pragma mark WebAVStreamDataParserListener
135
// Objective-C adapter that receives AVStreamDataParser delegate callbacks and
// forwards them to the owning C++ SourceBufferPrivateAVFObjC.
// NOTE(review): _parent is a raw pointer — the SourceBufferPrivateAVFObjC must
// outlive this listener (it owns the listener via a RetainPtr member; confirm
// no callback can arrive after destruction).
@interface WebAVStreamDataParserListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    AVStreamDataParser* _parser;
}
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
@end
142
@implementation WebAVStreamDataParserListener
// Designated initializer. Registers self as the parser's delegate; parent must
// outlive this listener (stored as a raw pointer).
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

// Compiled under manual retain/release (note [super dealloc]). Clearing the
// parser's delegate prevents callbacks into a deallocated listener.
- (void)dealloc
{
    [_parser setDelegate:nil];
    [super dealloc];
}

// Older delegate callback (no discontinuity flag): forward the parsed asset
// (initialization segment) to the owner.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

// Newer delegate callback: the discontinuity flag is currently ignored and the
// asset is handled identically to the variant above.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    UNUSED_PARAM(discontinuity);
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

// Parse failure: forward the NSError so the in-progress append can fail.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didFailToParseStreamDataWithError(error);
}

// A coded sample is available for trackID; forward for coded-frame processing.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)mediaData forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType flags:(NSUInteger)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didProvideMediaDataForTrackID(trackID, mediaData, mediaType, flags);
}

// End of a track's media data; handling is not implemented downstream yet.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
}
@end
209
210 namespace WebCore {
211
212 #pragma mark -
213 #pragma mark MediaSampleAVFObjC
214
// MediaSample backed by a CMSampleBuffer. The track is identified by the
// stringified numeric track ID used throughout this file.
class MediaSampleAVFObjC final : public MediaSample {
public:
    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
    virtual ~MediaSampleAVFObjC() { }

    // Timing accessors convert the buffer's CMTime values to MediaTime.
    virtual MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
    virtual MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
    virtual MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
    virtual AtomicString trackID() const override { return m_id; }

    virtual SampleFlags flags() const override;
    virtual PlatformSample platformSample() override;

protected:
    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
        : m_sample(sample)
        , m_id(String::format("%d", trackID)) // track ID stored as its decimal string form
    {
    }

    RetainPtr<CMSampleBufferRef> m_sample;
    AtomicString m_id;
};
238
239 PlatformSample MediaSampleAVFObjC::platformSample()
240 {
241     PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
242     return sample;
243 }
244
245 static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
246 {
247     CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
248     if (!attachments)
249         return true;
250
251     for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
252         CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
253         if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
254             return false;
255     }
256     return true;
257 }
258
259 MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
260 {
261     int returnValue = MediaSample::None;
262
263     if (CMSampleBufferIsRandomAccess(m_sample.get()))
264         returnValue |= MediaSample::IsSync;
265
266     return SampleFlags(returnValue);
267 }
268
269 #pragma mark -
270 #pragma mark MediaDescriptionAVFObjC
271
272 class MediaDescriptionAVFObjC final : public MediaDescription {
273 public:
274     static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
275     virtual ~MediaDescriptionAVFObjC() { }
276
277     virtual AtomicString codec() const override { return m_codec; }
278     virtual bool isVideo() const override { return m_isVideo; }
279     virtual bool isAudio() const override { return m_isAudio; }
280     virtual bool isText() const override { return m_isText; }
281     
282 protected:
283     MediaDescriptionAVFObjC(AVAssetTrack* track)
284         : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
285         , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
286         , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
287     {
288         NSArray* formatDescriptions = [track formatDescriptions];
289         CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
290         if (description) {
291             FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
292             m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
293         }
294     }
295
296     AtomicString m_codec;
297     bool m_isVideo;
298     bool m_isAudio;
299     bool m_isText;
300 };
301
302 #pragma mark -
303 #pragma mark SourceBufferPrivateAVFObjC
304
305 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
306 {
307     return adoptRef(new SourceBufferPrivateAVFObjC(parent));
308 }
309
// Creates the stream parser and the listener that forwards its delegate
// callbacks back into this object.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:this]))
    , m_mediaSource(parent)
    , m_client(0)
    , m_parsingSucceeded(true)
    , m_enabledVideoTrackID(-1) // -1 means no video track is currently enabled.
{
}
319
// Tear down the display layer and audio renderers before the members holding
// them are destroyed.
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    destroyRenderers();
}
324
325 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
326 {
327     LOG(Media, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
328
329     m_asset = asset;
330
331     m_videoTracks.clear();
332     m_audioTracks.clear();
333
334     SourceBufferPrivateClient::InitializationSegment segment;
335     segment.duration = toMediaTime([m_asset duration]);
336
337     for (AVAssetTrack* track in [m_asset tracks]) {
338         if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
339             SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
340             RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
341             info.track = videoTrack;
342             m_videoTracks.append(videoTrack);
343             info.description = MediaDescriptionAVFObjC::create(track);
344             segment.videoTracks.append(info);
345         } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
346             SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
347             RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
348             info.track = audioTrack;
349             m_audioTracks.append(audioTrack);
350             info.description = MediaDescriptionAVFObjC::create(track);
351             segment.audioTracks.append(info);
352         }
353
354         // FIXME(125161): Add TextTrack support
355     }
356
357     if (!m_videoTracks.isEmpty())
358         m_mediaSource->player()->sizeChanged();
359
360     if (m_client)
361         m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
362 }
363
// Parser error callback: record the failure so the in-progress append()
// returns ParsingFailed. The error itself is only logged.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(Media, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
373
// Context bundle for coded-frame processing.
// NOTE(review): appears unused in this file after the delegate-API adoption —
// confirm before removing. The reference member means an instance must not
// outlive the String it was constructed with.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
379
380 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
381 {
382     UNUSED_PARAM(flags);
383
384     processCodedFrame(trackID, sampleBuffer, mediaType);
385 }
386
387 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
388 {
389     if (m_client)
390         m_client->sourceBufferPrivateDidReceiveSample(this, MediaSampleAVFObjC::create(sampleBuffer, trackID));
391
392     return true;
393 }
394
395 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
396 {
397     UNUSED_PARAM(mediaType);
398     UNUSED_PARAM(trackID);
399     notImplemented();
400 }
401
// Attach (or, with null, detach) the client that receives initialization
// segments and samples.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
406
407 SourceBufferPrivate::AppendResult SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
408 {
409     m_parsingSucceeded = true;
410
411     LOG(Media, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
412     [m_parser appendStreamData:[NSData dataWithBytes:data length:length]];
413
414     if (m_parsingSucceeded && m_mediaSource)
415         m_mediaSource->player()->setLoadingProgresssed(true);
416
417     return m_parsingSucceeded ? AppendSucceeded : ParsingFailed;
418 }
419
// Aborting an in-progress append is not implemented yet.
void SourceBufferPrivateAVFObjC::abort()
{
    notImplemented();
}
424
425 void SourceBufferPrivateAVFObjC::destroyRenderers()
426 {
427     if (m_displayLayer) {
428         if (m_mediaSource)
429             m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
430         [m_displayLayer flush];
431         [m_displayLayer stopRequestingMediaData];
432         m_displayLayer = nullptr;
433     }
434
435     for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
436         AVSampleBufferAudioRenderer* renderer = it->second.get();
437         if (m_mediaSource)
438             m_mediaSource->player()->removeAudioRenderer(renderer);
439         [renderer flush];
440         [renderer stopRequestingMediaData];
441     }
442
443     m_audioRenderers.clear();
444 }
445
446 void SourceBufferPrivateAVFObjC::removedFromMediaSource()
447 {
448     destroyRenderers();
449
450     if (m_mediaSource)
451         m_mediaSource->removeSourceBuffer(this);
452 }
453
454 MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
455 {
456     return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
457 }
458
459 void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
460 {
461     if (m_mediaSource)
462         m_mediaSource->player()->setReadyState(readyState);
463 }
464
// Coded-frame eviction is not implemented yet.
void SourceBufferPrivateAVFObjC::evictCodedFrames()
{
    notImplemented();
}
469
// Buffer-full detection is not implemented; always reports not full.
bool SourceBufferPrivateAVFObjC::isFull()
{
    notImplemented();
    return false;
}
475
476
477 bool SourceBufferPrivateAVFObjC::hasVideo() const
478 {
479     if (!m_client)
480         return false;
481
482     return m_client->sourceBufferPrivateHasVideo(this);
483 }
484
485 bool SourceBufferPrivateAVFObjC::hasAudio() const
486 {
487     if (!m_client)
488         return false;
489
490     return m_client->sourceBufferPrivateHasAudio(this);
491 }
492
493 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
494 {
495     int trackID = track->trackID();
496     if (!track->selected() && m_enabledVideoTrackID == trackID) {
497         m_enabledVideoTrackID = -1;
498         [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
499         if (m_mediaSource)
500             m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
501     } else if (track->selected()) {
502         m_enabledVideoTrackID = trackID;
503         [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
504         if (!m_displayLayer) {
505             m_displayLayer = [[getAVSampleBufferDisplayLayerClass() alloc] init];
506             [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
507                 didBecomeReadyForMoreSamples(trackID);
508             }];
509         }
510         if (m_mediaSource)
511             m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
512     }
513 }
514
515 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
516 {
517     int trackID = track->trackID();
518
519     if (!track->enabled()) {
520         AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
521         [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
522         if (m_mediaSource)
523             m_mediaSource->player()->removeAudioRenderer(renderer);
524     } else {
525         [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
526         AVSampleBufferAudioRenderer* renderer;
527         if (!m_audioRenderers.count(trackID)) {
528             renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
529             [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
530                 didBecomeReadyForMoreSamples(trackID);
531             }];
532             m_audioRenderers[trackID] = renderer;
533         } else
534             renderer = m_audioRenderers[trackID].get();
535
536         if (m_mediaSource)
537             m_mediaSource->player()->addAudioRenderer(renderer);
538     }
539 }
540
// Returns a copy of sampleBuffer whose per-sample attachments all carry
// kCMSampleAttachmentKey_DoNotDisplay, so a renderer decodes it without
// presenting it. If the copy fails, falls back to returning the original
// buffer (retained by the RetainPtr).
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
    if (!newSampleBuffer)
        return sampleBuffer;

    // Pass true so the attachments array is created if it does not exist yet.
    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    // adoptCF() takes over the +1 reference from CMSampleBufferCreateCopy.
    return adoptCF(newSampleBuffer);
}
556
557 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
558 {
559     int trackID = trackIDString.toInt();
560     LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID);
561
562     if (trackID == m_enabledVideoTrackID)
563         flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
564     else if (m_audioRenderers.count(trackID))
565         flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
566 }
567
568 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
569 {
570     [renderer flush];
571
572     for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
573         RefPtr<MediaSample>& mediaSample = *it;
574
575         PlatformSample platformSample = mediaSample->platformSample();
576         ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
577
578         RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
579
580         [renderer enqueueSampleBuffer:sampleBuffer.get()];
581     }
582 }
583
584 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
585 {
586     [layer flush];
587
588     for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
589         RefPtr<MediaSample>& mediaSample = *it;
590
591         PlatformSample platformSample = mediaSample->platformSample();
592         ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
593
594         RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
595
596         [layer enqueueSampleBuffer:sampleBuffer.get()];
597     }
598
599     if (m_mediaSource)
600         m_mediaSource->player()->setHasAvailableVideoFrame(false);
601 }
602
603 void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
604 {
605     int trackID = trackIDString.toInt();
606     if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
607         return;
608
609     RefPtr<MediaSample> mediaSample = prpMediaSample;
610
611     PlatformSample platformSample = mediaSample->platformSample();
612     if (platformSample.type != PlatformSample::CMSampleBufferType)
613         return;
614
615     if (trackID == m_enabledVideoTrackID) {
616         [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
617         if (m_mediaSource)
618             m_mediaSource->player()->setHasAvailableVideoFrame(true);
619     } else
620         [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
621 }
622
623 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
624 {
625     int trackID = trackIDString.toInt();
626     if (trackID == m_enabledVideoTrackID)
627         return [m_displayLayer isReadyForMoreMediaData];
628     else if (m_audioRenderers.count(trackID))
629         return [m_audioRenderers[trackID] isReadyForMoreMediaData];
630     else
631         ASSERT_NOT_REACHED();
632
633     return false;
634 }
635
636 void SourceBufferPrivateAVFObjC::setActive(bool isActive)
637 {
638     if (m_mediaSource)
639         m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
640 }
641
642 MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
643 {
644     if (m_client)
645         return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
646     return time;
647 }
648
649 void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
650 {
651     if (m_client)
652         m_client->sourceBufferPrivateSeekToTime(this, time);
653 }
654
655 IntSize SourceBufferPrivateAVFObjC::naturalSize()
656 {
657     for (auto videoTrack : m_videoTracks) {
658         if (videoTrack->selected())
659             return videoTrack->naturalSize();
660     }
661
662     return IntSize();
663 }
664
// A renderer signaled readiness: make the notification one-shot by stopping
// the renderer's request before telling the client, which may immediately
// enqueue more samples.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer stopRequestingMediaData];
    else if (m_audioRenderers.count(trackID))
        [m_audioRenderers[trackID] stopRequestingMediaData];
    else {
        ASSERT_NOT_REACHED();
        return;
    }

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
}
679
680 void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
681 {
682     int trackID = trackIDString.toInt();
683     if (trackID == m_enabledVideoTrackID) {
684         [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
685             didBecomeReadyForMoreSamples(trackID);
686         }];
687     } else if (m_audioRenderers.count(trackID)) {
688         [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
689             didBecomeReadyForMoreSamples(trackID);
690         }];
691     } else
692         ASSERT_NOT_REACHED();
693 }
694
695 }
696
697 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)