[MSE][Mac] Report the intrinsic size of the media element
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "ExceptionCodePlaceholder.h"
32 #import "Logging.h"
33 #import "MediaDescription.h"
34 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35 #import "MediaSample.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaTimeMac.h"
38 #import "NotImplemented.h"
39 #import "SoftLinking.h"
40 #import "SourceBufferPrivateClient.h"
41 #import "TimeRanges.h"
42 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
43 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import <AVFoundation/AVAssetTrack.h>
46 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
47 #import <objc/runtime.h>
48 #import <wtf/text/AtomicString.h>
49 #import <wtf/text/CString.h>
50 #import <wtf/HashCountedSet.h>
51 #import <wtf/WeakPtr.h>
52 #import <map>
53
54 #pragma mark -
55 #pragma mark Soft Linking
56
57 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
58 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
59
60 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
61 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
62 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
63 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
64
65 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
66 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
67 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
68
69 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
70 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
71 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
72 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
73 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
74 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
75 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
76 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
77
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
81
82 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
83 SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
84 SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
85 SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
86 SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
87 SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
88 SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
89 SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
90 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
91 SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
92
93 #define AVMediaTypeVideo getAVMediaTypeVideo()
94 #define AVMediaTypeAudio getAVMediaTypeAudio()
95 #define AVMediaTypeText getAVMediaTypeText()
96 #define kCMTimeZero getkCMTimeZero()
97 #define kCMTimeInvalid getkCMTimeInvalid()
98 #define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
99 #define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
100 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
101 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
102 #define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
103 #define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
104
105 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
106 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
107 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
108
109 #pragma mark -
110 #pragma mark AVStreamDataParser
111
// SPI declaration for the soft-linked AVStreamDataParser class (not in the
// public AVFoundation headers).  Only the selectors this file actually sends
// are declared here.
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
@end
118
119 #pragma mark -
120 #pragma mark AVSampleBufferAudioRenderer
121
// Forward declaration of AVSampleBufferAudioRenderer for SDKs that do not yet
// ship a header for it; only the selectors used in this file are declared.
#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
@interface AVSampleBufferAudioRenderer : NSObject
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
#endif
133
134 #pragma mark -
135 #pragma mark WebAVStreamDataParserListener
136
// Objective-C adapter that forwards AVStreamDataParser delegate callbacks to a
// C++ SourceBufferPrivateAVFObjC.  Both ivars are raw (non-retaining) pointers;
// the parent owns the parser, this listener, and their lifetimes.
@interface WebAVStreamDataParserListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    AVStreamDataParser* _parser;
}
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
@end
143
@implementation WebAVStreamDataParserListener
// Wires itself up as |parser|'s delegate.  _parent and _parser are raw ivars;
// the owning SourceBufferPrivateAVFObjC must release this listener (which
// clears the parser's delegate in -dealloc) before it is itself destroyed.
// NOTE(review): nothing here guards against a parser callback racing with
// parent destruction — confirm callbacks arrive synchronously on the thread
// that calls -appendStreamData:.
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

- (void)dealloc
{
    // Disconnect the parser so it can no longer message a deallocated delegate.
    [_parser setDelegate:nil];
    [super dealloc];
}

// An initialization segment has been parsed into an AVAsset; forward to the parent.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

// The parser could not make sense of the appended bytes; forward the NSError.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didFailToParseStreamDataWithError(error);
}

// A coded frame (CMSampleBuffer) is available for |trackID|; forward it.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)mediaData forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType flags:(NSUInteger)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didProvideMediaDataForTrackID(trackID, mediaData, mediaType, flags);
}

// The parser reached the end of a track's data; forward (currently a no-op in the parent).
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
}
@end
200
201 namespace WebCore {
202
203 #pragma mark -
204 #pragma mark MediaSampleAVFObjC
205
// MediaSample backed by a retained CMSampleBuffer.  Timing accessors delegate
// to the Core Media getters; the track ID is stored as its decimal string form
// because MediaSample exposes trackID() as an AtomicString.
class MediaSampleAVFObjC FINAL : public MediaSample {
public:
    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
    virtual ~MediaSampleAVFObjC() { }

    virtual MediaTime presentationTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
    virtual MediaTime decodeTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
    virtual MediaTime duration() const OVERRIDE { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
    virtual AtomicString trackID() const OVERRIDE { return m_id; }

    virtual SampleFlags flags() const OVERRIDE;
    virtual PlatformSample platformSample() OVERRIDE;

protected:
    // RetainPtr construction retains |sample|; the buffer stays alive as long as this wrapper.
    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
        : m_sample(sample)
        , m_id(String::format("%d", trackID))
    {
    }

    RetainPtr<CMSampleBufferRef> m_sample;
    AtomicString m_id;
};
229
// Expose the wrapped CMSampleBuffer to platform code without transferring ownership.
PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample platformSample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return platformSample;
}
235
// A sample buffer counts as random-access (a sync point) unless at least one of
// its samples carries the kCMSampleAttachmentKey_NotSync attachment.  A missing
// attachments array means no sample was marked not-sync.
static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    CFIndex attachmentCount = CFArrayGetCount(attachments);
    for (CFIndex index = 0; index < attachmentCount; ++index) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, index);
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
            return false;
    }

    return true;
}
249
// Derive the sample flags: only the IsSync bit is computed here; all other
// flag bits remain unset.
MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    if (CMSampleBufferIsRandomAccess(m_sample.get()))
        return SampleFlags(MediaSample::IsSync);
    return SampleFlags(MediaSample::None);
}
259
260 #pragma mark -
261 #pragma mark MediaDescriptionAVFObjC
262
// MediaDescription backed by an AVAssetTrack: snapshots the track's media
// characteristics and the FourCC of its first format description (e.g. 'avc1').
class MediaDescriptionAVFObjC FINAL : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    virtual AtomicString codec() const OVERRIDE { return m_codec; }
    virtual bool isVideo() const OVERRIDE { return m_isVideo; }
    virtual bool isAudio() const OVERRIDE { return m_isAudio; }
    virtual bool isText() const OVERRIDE { return m_isText; }

protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // CMFormatDescriptionGetMediaSubType() returns the FourCC as a
            // host-endian integer.  Swap to big-endian before reinterpreting the
            // bytes as characters so the codec string reads 'avc1' rather than
            // '1cva' on little-endian hardware.
            FourCharCode codec = CFSwapInt32HostToBig(CMFormatDescriptionGetMediaSubType(description));
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
292
293 #pragma mark -
294 #pragma mark SourceBufferPrivateAVFObjC
295
// Factory: adoptRef() takes ownership of the initial reference from new.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    RefPtr<SourceBufferPrivateAVFObjC> sourceBuffer = adoptRef(new SourceBufferPrivateAVFObjC(parent));
    return sourceBuffer;
}
300
// Creates the stream-data parser and its delegate adapter.  The delegate holds
// a raw back-pointer to |this| and is attached to the parser immediately, so
// parser callbacks can target this object for its whole lifetime.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:this]))
    , m_mediaSource(parent)
    , m_client(0)
    , m_parsingSucceeded(true)
    , m_enabledVideoTrackID(-1) // -1 means no video track is currently enabled.
{
}
310
// Tears down the display layer and audio renderers.  Releasing m_delegate
// afterwards clears the parser's delegate pointer (see the listener's -dealloc).
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    destroyRenderers();
}
315
// Parser callback: an initialization segment has been parsed into an AVAsset.
// Rebuilds the video/audio track lists, fills out an InitializationSegment,
// notifies the player so it can recompute its intrinsic size, and finally
// informs the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    m_asset = asset;

    // A new initialization segment replaces any previously-announced tracks.
    m_videoTracks.clear();
    m_audioTracks.clear();

    SourceBufferPrivateClient::InitializationSegment segment;
    segment.duration = toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    // Guard m_mediaSource as every other caller in this file does: the buffer
    // may already have been removed from its media source.
    if (!m_videoTracks.isEmpty() && m_mediaSource)
        m_mediaSource->player()->sizeChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
}
354
// Parser callback: records the failure so the in-progress append() returns
// ParsingFailed (append() resets and then reads m_parsingSucceeded).
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(Media, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
364
// Argument bundle for per-sample iteration callbacks.
// NOTE(review): not referenced anywhere in this file, and the reference member
// ties its lifetime to a caller-owned String — confirm it is still needed
// before extending or copying it.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
370
// Parser callback: a coded frame arrived for |trackID|.  |flags| is currently
// unused; the frame is handed straight to processCodedFrame().
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
{
    UNUSED_PARAM(flags);

    processCodedFrame(trackID, sampleBuffer, mediaType);
}
377
// Wraps the sample buffer in a MediaSampleAVFObjC and delivers it to the
// client.  Frames arriving before a client is attached are dropped.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (!m_client)
        return true;

    m_client->sourceBufferPrivateDidReceiveSample(this, MediaSampleAVFObjC::create(sampleBuffer, trackID));
    return true;
}
385
// Parser callback: end of a track's data.  Intentionally unimplemented for now.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
{
    UNUSED_PARAM(mediaType);
    UNUSED_PARAM(trackID);
    notImplemented();
}
392
// Registers the client that receives parsed segments and samples.  Not owned;
// pass null to detach.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
{
    m_client = client;
}
397
// Feeds |data| to the AVStreamDataParser.  m_parsingSucceeded is reset first
// and read right after the append, so parse-failure callbacks are assumed to
// be delivered before -appendStreamData: returns.  NOTE(review): confirm the
// parser's delegate callbacks are synchronous with the append.
SourceBufferPrivate::AppendResult SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
{
    m_parsingSucceeded = true;

    LOG(Media, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
    [m_parser appendStreamData:[NSData dataWithBytes:data length:length]];

    if (m_parsingSucceeded && m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    return m_parsingSucceeded ? AppendSucceeded : ParsingFailed;
}
410
// Resetting the parser's segment state on abort is not implemented yet.
void SourceBufferPrivateAVFObjC::abort()
{
    notImplemented();
}
415
// Detaches the display layer and all audio renderers from the player, flushing
// each one and cancelling its ready-for-data callback before release.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer) {
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        m_displayLayer = nullptr;
    }

    for (auto& trackIDAndRenderer : m_audioRenderers) {
        AVSampleBufferAudioRenderer* renderer = trackIDAndRenderer.second.get();
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer);
        [renderer flush];
        [renderer stopRequestingMediaData];
    }

    m_audioRenderers.clear();
}
436
// Called when this buffer is detached: release all renderers, then unregister
// from the owning media source (if it still exists).
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyRenderers();

    if (!m_mediaSource)
        return;
    m_mediaSource->removeSourceBuffer(this);
}
444
// Mirrors the player's ready state; with no media source nothing is loaded yet.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
449
// Forwards a ready-state change to the player, if still attached to a source.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setReadyState(readyState);
}
455
// Frame eviction is not implemented yet; see isFull() below.
void SourceBufferPrivateAVFObjC::evictCodedFrames()
{
    notImplemented();
}
460
// Always reports not-full until buffer-size bookkeeping is implemented.
bool SourceBufferPrivateAVFObjC::isFull()
{
    notImplemented();
    return false;
}
466
467
// Delegates to the client; with no client attached there is no video.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    return m_client && m_client->sourceBufferPrivateHasVideo(this);
}
475
// Delegates to the client; with no client attached there is no audio.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    return m_client && m_client->sourceBufferPrivateHasAudio(this);
}
483
// A video track was selected or deselected.  Enables/disables sample delivery
// for the track and attaches/detaches the shared display layer accordingly.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        // The currently-enabled track was deselected: stop producing its
        // samples and pull the display layer out of the player.
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        if (!m_displayLayer) {
            // adoptNS() takes ownership of the +1 reference from +alloc.
            // Assigning the raw pointer to the RetainPtr would retain again
            // and leak the layer.
            m_displayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
        }
        if (m_mediaSource)
            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
    }
}
505
// An audio track was enabled or disabled.  Enables/disables sample delivery
// and attaches/detaches the per-track AVSampleBufferAudioRenderer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        // Use find() rather than operator[] so that disabling a track which
        // never had a renderer does not insert an empty map entry.
        auto it = m_audioRenderers.find(trackID);
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (it != m_audioRenderers.end() && m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(it->second.get());
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        AVSampleBufferAudioRenderer* renderer;
        if (!m_audioRenderers.count(trackID)) {
            renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            // adoptNS() takes ownership of the +1 reference from +alloc;
            // storing the raw pointer directly would retain again and leak.
            m_audioRenderers[trackID] = adoptNS(renderer);
        } else
            renderer = m_audioRenderers[trackID].get();

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer);
    }
}
531
// Returns a copy of |sampleBuffer| with kCMSampleAttachmentKey_DoNotDisplay set
// on every sample, so it can be enqueued to prime the decoder without being
// shown.  Falls back to the original buffer (retained, unmodified) if the copy
// cannot be created.
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
    if (!newSampleBuffer)
        return sampleBuffer;

    // Pass true so the attachments array is created if it does not exist yet.
    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    // adoptCF() takes over the +1 reference from CMSampleBufferCreateCopy().
    return adoptCF(newSampleBuffer);
}
547
// Routes a batch of non-displaying samples to whichever renderer owns the
// track; samples for unknown tracks are dropped.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    // Vector::size() is unsigned and may be wider than int; match the format
    // specifier explicitly instead of passing it to %d.
    LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %lu samples, trackId: %d", this, (unsigned long)mediaSamples.size(), trackID);

    if (trackID == m_enabledVideoTrackID)
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
    else if (m_audioRenderers.count(trackID))
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
}
558
// Flushes the audio renderer, then refills it with DoNotDisplay-tagged copies
// of the given samples.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
{
    [renderer flush];

    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [renderer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }
}
574
// Flushes the display layer, refills it with DoNotDisplay-tagged copies of the
// given samples, and clears the player's available-video-frame flag since
// nothing displayable is queued afterwards.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
{
    [layer flush];

    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [layer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }

    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
}
593
// Hands one sample to the renderer owning |trackIDString|'s track.  Samples for
// tracks without a renderer, or non-CMSampleBuffer samples, are dropped.
void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
        return;

    RefPtr<MediaSample> mediaSample = prpMediaSample;

    PlatformSample platformSample = mediaSample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    if (trackID == m_enabledVideoTrackID) {
        [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        // A displayable frame has been queued; let the player report it.
        if (m_mediaSource)
            m_mediaSource->player()->setHasAvailableVideoFrame(true);
    } else
        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
}
613
// Asks whichever renderer owns this track whether its queue can accept more
// samples.  An unknown track ID is a caller error.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();

    if (trackID == m_enabledVideoTrackID)
        return [m_displayLayer isReadyForMoreMediaData];

    if (m_audioRenderers.count(trackID))
        return [m_audioRenderers[trackID] isReadyForMoreMediaData];

    ASSERT_NOT_REACHED();
    return false;
}
626
// Reports an active-state change up to the media source, if still attached.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
632
// Lets the client pick a cheaper seek target near |time|; without a client the
// requested time stands.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    if (!m_client)
        return time;
    return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
}
639
// Forwards a seek request to the client, if one is attached.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;
    m_client->sourceBufferPrivateSeekToTime(this, time);
}
645
// Reports the natural size of the currently-selected video track so the media
// element can expose an intrinsic size; returns a zero size when no video
// track is selected.
IntSize SourceBufferPrivateAVFObjC::naturalSize()
{
    for (auto videoTrack : m_videoTracks) {
        if (videoTrack->selected())
            return videoTrack->naturalSize();
    }

    return IntSize();
}
655
// Fired (on the main queue) by a renderer's ready-for-more-data block.  Cancels
// further notifications — making them effectively one-shot — then tells the
// client, which may re-arm via notifyClientWhenReadyForMoreSamples().
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer stopRequestingMediaData];
    else if (m_audioRenderers.count(trackID))
        [m_audioRenderers[trackID] stopRequestingMediaData];
    else {
        ASSERT_NOT_REACHED();
        return;
    }

    if (m_client)
        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
}
670
// Re-arms the ready-for-more-data callback on the renderer owning the track;
// didBecomeReadyForMoreSamples() will then fire on the main queue once the
// renderer can accept more samples.  An unknown track ID is a caller error.
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID) {
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else if (m_audioRenderers.count(trackID)) {
        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
            didBecomeReadyForMoreSamples(trackID);
        }];
    } else
        ASSERT_NOT_REACHED();
}
685
686 }
687
688 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)