2 * Copyright (C) 2013 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #import "SourceBufferPrivateAVFObjC.h"
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
31 #import "ExceptionCodePlaceholder.h"
33 #import "MediaDescription.h"
34 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35 #import "MediaSample.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaTimeMac.h"
38 #import "NotImplemented.h"
39 #import "SoftLinking.h"
40 #import "SourceBufferPrivateClient.h"
41 #import "TimeRanges.h"
42 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
43 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import <AVFoundation/AVAssetTrack.h>
46 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
47 #import <objc/runtime.h>
48 #import <wtf/text/AtomicString.h>
49 #import <wtf/text/CString.h>
50 #import <wtf/HashCountedSet.h>
51 #import <wtf/WeakPtr.h>
// Soft-linking: AVFoundation and CoreMedia are loaded lazily at runtime so
// this translation unit carries no hard link-time dependency on either
// framework. The #defines at the bottom redirect the framework symbol names
// to the generated getter functions.
55 #pragma mark Soft Linking
57 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
58 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
// Obj-C classes resolved on first use.
60 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
61 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
62 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
63 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
// NSString media-type constants (optional: may be absent at runtime).
65 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
66 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
67 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
// CoreMedia time and sample-attachment constants.
// NOTE(review): the next two lines end with a stray ';' unlike every sibling
// SOFT_LINK_CONSTANT declaration — harmless but inconsistent.
69 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
70 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
71 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
72 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
73 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
74 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
75 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
76 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
78 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
// CoreMedia C functions, soft-linked with matching signatures.
82 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
83 SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
84 SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
85 SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
86 SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
87 SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
88 SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
89 SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
90 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
91 SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
// Map the canonical framework symbol names onto the soft-link getters so the
// rest of the file can use the ordinary spellings.
93 #define AVMediaTypeVideo getAVMediaTypeVideo()
94 #define AVMediaTypeAudio getAVMediaTypeAudio()
95 #define AVMediaTypeText getAVMediaTypeText()
96 #define kCMTimeZero getkCMTimeZero()
97 #define kCMTimeInvalid getkCMTimeInvalid()
98 #define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
99 #define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
100 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
101 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
102 #define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
103 #define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
105 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
106 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
107 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
110 #pragma mark AVStreamDataParser
// SPI declaration for AVStreamDataParser (not in the public AVFoundation
// headers at the time); only the messages this file sends are declared.
// NOTE(review): the original line numbers jump in this listing — the closing
// @end (and possibly further declarations) are elided here; confirm against
// the full file.
112 @interface AVStreamDataParser : NSObject
113 - (void)setDelegate:(id)delegate;
114 - (void)appendStreamData:(NSData *)data;
115 - (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
116 - (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
120 #pragma mark AVSampleBufferAudioRenderer
// Interface declaration for AVSampleBufferAudioRenderer when the deployment
// target predates its public header (guarded on the 10.9 version check).
// NOTE(review): lines 124-125, 127 and the @end are elided in this listing.
122 #if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
123 @interface AVSampleBufferAudioRenderer : NSObject
126 - (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
128 - (BOOL)isReadyForMoreMediaData;
129 - (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
130 - (void)stopRequestingMediaData;
135 #pragma mark WebAVStreamDataParserListener
// Obj-C shim that receives AVStreamDataParser delegate callbacks and forwards
// them to the C++ SourceBufferPrivateAVFObjC object.
// NOTE(review): _parent is a raw pointer — lifetime is presumably managed by
// the owning SourceBufferPrivateAVFObjC clearing the delegate before it dies;
// confirm against the full file (dealloc appears partially elided below).
137 @interface WebAVStreamDataParserListener : NSObject {
138 WebCore::SourceBufferPrivateAVFObjC* _parent;
139 AVStreamDataParser* _parser;
141 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
144 @implementation WebAVStreamDataParserListener
// Designated initializer: registers self as the parser's delegate.
// NOTE(review): original lines 146-153 (super init, assignments) are elided
// in this listing.
145 - (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent
154 [_parser setDelegate:self];
// Detaches from the parser (original context elided — presumably dealloc or
// an invalidate method).
160 [_parser setDelegate:nil];
// Delegate callback: the parser produced an AVAsset from appended stream data.
164 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
167 UNUSED_PARAM(streamDataParser);
169 ASSERT(streamDataParser == _parser);
170 _parent->didParseStreamDataAsAsset(asset);
// Delegate callback: parsing failed with an NSError.
173 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
176 UNUSED_PARAM(streamDataParser);
178 ASSERT(streamDataParser == _parser);
179 _parent->didFailToParseStreamDataWithError(error);
// Delegate callback: a coded frame (CMSampleBuffer) is available for a track.
182 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)mediaData forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType flags:(NSUInteger)flags
185 UNUSED_PARAM(streamDataParser);
187 ASSERT(streamDataParser == _parser);
188 _parent->didProvideMediaDataForTrackID(trackID, mediaData, mediaType, flags);
// Delegate callback: the parser reached the end of a track's data.
191 - (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType
194 UNUSED_PARAM(streamDataParser);
196 ASSERT(streamDataParser == _parser);
197 _parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
204 #pragma mark MediaSampleAVFObjC
// Wraps a CMSampleBufferRef as a WebCore::MediaSample. Timing accessors
// delegate directly to the CoreMedia getters; the track ID is stored as its
// decimal-string form (MediaSample::trackID returns an AtomicString).
206 class MediaSampleAVFObjC FINAL : public MediaSample {
208     static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
209     virtual ~MediaSampleAVFObjC() { }
211     virtual MediaTime presentationTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
212     virtual MediaTime decodeTime() const OVERRIDE { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
213     virtual MediaTime duration() const OVERRIDE { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
214     virtual AtomicString trackID() const OVERRIDE { return m_id; }
216     virtual SampleFlags flags() const OVERRIDE;
217     virtual PlatformSample platformSample() OVERRIDE;
// NOTE(review): original line 221 (presumably ": m_sample(sample)") is elided
// in this listing.
220     MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
222         , m_id(String::format("%d", trackID))
226     RetainPtr<CMSampleBufferRef> m_sample;
// Exposes the underlying CMSampleBuffer to platform code without retaining.
230 PlatformSample MediaSampleAVFObjC::platformSample()
232     PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
// Returns whether the sample is a random-access (sync) point: it scans the
// sample's attachment dictionaries for kCMSampleAttachmentKey_NotSync.
// NOTE(review): the early-return for a missing attachments array and the
// loop/function return statements are elided in this listing; confirm the
// polarity against the full file.
236 static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
238     CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
242     for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
243         CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
244         if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
// Maps the CoreMedia sync status onto MediaSample::SampleFlags.
250 MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
252     int returnValue = MediaSample::None;
254     if (CMSampleBufferIsRandomAccess(m_sample.get()))
255         returnValue |= MediaSample::IsSync;
257     return SampleFlags(returnValue);
261 #pragma mark MediaDescriptionAVFObjC
// Wraps an AVAssetTrack as a WebCore::MediaDescription. Kind flags are taken
// from the track's media characteristics; the codec string is derived from
// the FourCC media subtype of the track's first format description.
263 class MediaDescriptionAVFObjC FINAL : public MediaDescription {
265     static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
266     virtual ~MediaDescriptionAVFObjC() { }
268     virtual AtomicString codec() const OVERRIDE { return m_codec; }
269     virtual bool isVideo() const OVERRIDE { return m_isVideo; }
270     virtual bool isAudio() const OVERRIDE { return m_isAudio; }
271     virtual bool isText() const OVERRIDE { return m_isText; }
274     MediaDescriptionAVFObjC(AVAssetTrack* track)
275         : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
276         , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
277         , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
279         NSArray* formatDescriptions = [track formatDescriptions];
// NOTE(review): description can be 0 when the track has no format
// descriptions, yet the listing shows it passed unguarded to
// CMFormatDescriptionGetMediaSubType (a guard may be elided at line 281);
// confirm against the full file.
280         CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
282         FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
// NOTE(review): building the codec string by reinterpreting the FourCharCode
// bytes is host-byte-order sensitive — on little-endian hosts this yields the
// characters in reversed order; confirm the intended behavior.
283         m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
287     AtomicString m_codec;
294 #pragma mark SourceBufferPrivateAVFObjC
// Factory: instances are always heap-allocated and reference counted.
296 RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
298     return adoptRef(new SourceBufferPrivateAVFObjC(parent));
// Constructor: creates the AVStreamDataParser and the Obj-C listener shim
// that forwards parser delegate callbacks back to |this|. Both are created
// with adoptNS to take ownership of the +1 from alloc/init.
// NOTE(review): original line 305 is elided — the initializer list likely
// initializes a further member (e.g. m_client); confirm against the full file.
301 SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
302     : m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
303     , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:this]))
304     , m_mediaSource(parent)
306     , m_parsingSucceeded(true)
307     , m_enabledVideoTrackID(-1)
// Destructor — body elided in this listing (original lines 312-314);
// presumably tears down renderers/delegate.
311 SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
// Parser callback: an initialization segment was parsed into an AVAsset.
// Builds a SourceBufferPrivateClient::InitializationSegment (duration plus
// per-track info) from the asset's tracks and hands it to m_client.
// NOTE(review): original lines 319-321 (presumably the m_asset assignment and
// a client null-check) and closing braces are elided in this listing.
316 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
318     LOG(Media, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
322     SourceBufferPrivateClient::InitializationSegment segment;
323     segment.duration = toMediaTime([m_asset duration]);
// Classify each track by media characteristic and wrap it in the matching
// WebCore track/description objects.
325     for (AVAssetTrack* track in [m_asset tracks]) {
326         if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
327             SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
328             info.track = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
329             info.description = MediaDescriptionAVFObjC::create(track);
330             segment.videoTracks.append(info);
331         } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
332             SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
333             info.track = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
334             info.description = MediaDescriptionAVFObjC::create(track);
335             segment.audioTracks.append(info);
338     // FIXME(125161): Add TextTrack support
342     m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
// Parser callback: parsing failed. Records the failure so append() can
// report ParsingFailed.
// NOTE(review): lines 346-349 (presumably UNUSED_PARAM/braces) are elided.
345 void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
350     LOG(Media, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
352     m_parsingSucceeded = false;
// Bundle of state for per-sample processing.
// NOTE(review): holding a String by const reference in a struct is safe only
// while the referenced String outlives the struct — confirm usage (the
// struct's use site is elided in this listing).
355 struct ProcessCodedFrameInfo {
356     SourceBufferPrivateAVFObjC* sourceBuffer;
358     const String& mediaType;
// Parser callback: a coded frame arrived for |trackID|; forward it to
// processCodedFrame.
361 void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
365     processCodedFrame(trackID, sampleBuffer, mediaType);
// Wraps the sample as a MediaSampleAVFObjC and notifies the client.
// NOTE(review): a client null-check and the return statement appear elided.
368 bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
371     m_client->sourceBufferPrivateDidReceiveSample(this, MediaSampleAVFObjC::create(sampleBuffer, trackID));
// Parser callback: end of a track's data. Currently a no-op (parameters
// deliberately unused).
376 void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
378     UNUSED_PARAM(mediaType);
379     UNUSED_PARAM(trackID);
// Stores the client that receives initialization-segment and sample
// notifications (assignment body elided in this listing).
383 void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
// Appends raw stream bytes to the parser. Parsing is synchronous: the
// delegate callbacks (didParseStreamDataAsAsset / didFailToParse...) run
// before appendStreamData: returns, so m_parsingSucceeded reflects this
// append by the time it is checked below.
388 SourceBufferPrivate::AppendResult SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
390     m_parsingSucceeded = true;
392     LOG(Media, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
393     [m_parser appendStreamData:[NSData dataWithBytes:data length:length]];
// "setLoadingProgresssed" (sic) is the player API's own spelling.
395     if (m_parsingSucceeded && m_mediaSource)
396         m_mediaSource->player()->setLoadingProgresssed(true);
398     return m_parsingSucceeded ? AppendSucceeded : ParsingFailed;
// Abort — body elided in this listing (original lines 402-404).
401 void SourceBufferPrivateAVFObjC::abort()
// Tears down the display layer and all audio renderers: detach each from the
// player, flush pending samples, stop the ready-for-data callbacks, release.
// NOTE(review): m_mediaSource null-checks (e.g. original lines 409, 418) and
// a [renderer flush] appear elided in this listing.
406 void SourceBufferPrivateAVFObjC::destroyRenderers()
408     if (m_displayLayer) {
410         m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
411         [m_displayLayer flush];
412         [m_displayLayer stopRequestingMediaData];
413         m_displayLayer = nullptr;
// it->second indicates m_audioRenderers is a std::map-style container keyed
// by track ID.
416     for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
417         AVSampleBufferAudioRenderer* renderer = it->second.get();
419         m_mediaSource->player()->removeAudioRenderer(renderer);
421         [renderer stopRequestingMediaData];
424     m_audioRenderers.clear();
// Detaches this source buffer from its media source (renderer teardown and
// m_mediaSource reset presumably elided around line 429-433).
427 void SourceBufferPrivateAVFObjC::removedFromMediaSource()
432     m_mediaSource->removeSourceBuffer(this);
// Ready state is delegated to the owning player; HaveNothing once detached.
435 MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
437     return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
// Forwards a ready-state change to the player (null-check likely elided).
440 void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
443     m_mediaSource->player()->setReadyState(readyState);
// Eviction — body elided in this listing (presumably notImplemented).
446 void SourceBufferPrivateAVFObjC::evictCodedFrames()
// Fullness check — body elided in this listing.
451 bool SourceBufferPrivateAVFObjC::isFull()
// Video/audio presence is delegated to the client (client null-checks at the
// elided lines 460-461 / 468-469, presumably).
458 bool SourceBufferPrivateAVFObjC::hasVideo() const
463     return m_client->sourceBufferPrivateHasVideo(this);
466 bool SourceBufferPrivateAVFObjC::hasAudio() const
471     return m_client->sourceBufferPrivateHasAudio(this);
// A video track was selected/deselected. On deselect, stop requesting that
// track's media and detach the display layer; on select, record the enabled
// track, request its media, and lazily create the AVSampleBufferDisplayLayer.
474 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
476     int trackID = track->trackID();
477     if (!track->selected() && m_enabledVideoTrackID == trackID) {
478         m_enabledVideoTrackID = -1;
479         [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
481         m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
482     } else if (track->selected()) {
483         m_enabledVideoTrackID = trackID;
484         [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
485         if (!m_displayLayer) {
// NOTE(review): unlike the constructor's adoptNS pattern, this alloc/init
// result is assigned directly — if m_displayLayer is a RetainPtr this
// over-retains (leaks) the layer; confirm against the member declaration.
// The ready-callback block also captures |this| implicitly; lifetime must be
// guaranteed while the callback is registered.
486             m_displayLayer = [[getAVSampleBufferDisplayLayerClass() alloc] init];
487             [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
488                 didBecomeReadyForMoreSamples(trackID);
492         m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
// An audio track was enabled/disabled. Mirrors the video path, but keeps one
// AVSampleBufferAudioRenderer per track ID in m_audioRenderers.
496 void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
498     int trackID = track->trackID();
500     if (!track->enabled()) {
501         AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
502         [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
504         m_mediaSource->player()->removeAudioRenderer(renderer);
// else-branch: enable the track, creating its renderer on first use.
506         [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
507         AVSampleBufferAudioRenderer* renderer;
508         if (!m_audioRenderers.count(trackID)) {
// NOTE(review): same direct alloc/init assignment as the display layer above
// — possible over-retain if the map stores RetainPtr; confirm.
509             renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
510             [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
511                 didBecomeReadyForMoreSamples(trackID);
513             m_audioRenderers[trackID] = renderer;
515             renderer = m_audioRenderers[trackID].get();
518         m_mediaSource->player()->addAudioRenderer(renderer);
// Returns a copy of |sampleBuffer| with kCMSampleAttachmentKey_DoNotDisplay
// set on every sample attachment, so the decoder consumes it (e.g. to prime
// decode state after a seek) without rendering it. Returns the copy with
// ownership transferred via adoptCF; a null return on copy failure is implied
// by the elided lines 527-528.
522 static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
524     CMSampleBufferRef newSampleBuffer = 0;
525     CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
526     if (!newSampleBuffer)
// createIfNecessary=true: ensure each sample has an attachment dictionary to
// mark.
529     CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
530     for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
531         CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
532         CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
535     return adoptCF(newSampleBuffer);
// Dispatches a flush+enqueue of non-displaying samples to whichever renderer
// owns |trackIDString| (video display layer or per-track audio renderer).
// NOTE(review): %d is used for mediaSamples.size() in the LOG format — size
// is typically unsigned/size_t; confirm against the full file.
538 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
540     int trackID = trackIDString.toInt();
541     LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID);
543     if (trackID == m_enabledVideoTrackID)
544         flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
545     else if (m_audioRenderers.count(trackID))
546         flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
// Audio variant: enqueue a DoNotDisplay copy of each sample on the renderer.
// NOTE(review): the "[renderer flush]" implied by the method name appears at
// an elided line (551-552, presumably); confirm.
549 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
553     for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
554         RefPtr<MediaSample>& mediaSample = *it;
556         PlatformSample platformSample = mediaSample->platformSample();
557         ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
559         RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
561         [renderer enqueueSampleBuffer:sampleBuffer.get()];
// Video variant: same as above for the display layer; also clears the
// player's "has available video frame" flag since nothing new will render.
565 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
569     for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
570         RefPtr<MediaSample>& mediaSample = *it;
572         PlatformSample platformSample = mediaSample->platformSample();
573         ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
575         RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
577         [layer enqueueSampleBuffer:sampleBuffer.get()];
581     m_mediaSource->player()->setHasAvailableVideoFrame(false);
// Enqueues one displayable sample on the renderer owning its track: the
// display layer for the enabled video track (also marking a video frame as
// available on the player), or the matching audio renderer. Samples for
// unknown tracks are dropped by the early return at line 587/588.
584 void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
586     int trackID = trackIDString.toInt();
587     if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
590     RefPtr<MediaSample> mediaSample = prpMediaSample;
592     PlatformSample platformSample = mediaSample->platformSample();
593     if (platformSample.type != PlatformSample::CMSampleBufferType)
596     if (trackID == m_enabledVideoTrackID) {
597         [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
599         m_mediaSource->player()->setHasAvailableVideoFrame(true);
// else-branch (audio): brace elided in this listing.
601         [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
// Whether the renderer for |trackIDString| can accept more sample buffers.
// Unknown track IDs hit ASSERT_NOT_REACHED (the fallback return is elided).
604 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
606     int trackID = trackIDString.toInt();
607     if (trackID == m_enabledVideoTrackID)
608         return [m_displayLayer isReadyForMoreMediaData];
609     else if (m_audioRenderers.count(trackID))
610         return [m_audioRenderers[trackID] isReadyForMoreMediaData];
612     ASSERT_NOT_REACHED();
// Reports active-state changes up to the media source (m_mediaSource
// null-check presumably elided at line 619).
617 void SourceBufferPrivateAVFObjC::setActive(bool isActive)
620     m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
// Fast-seek and seek are delegated to the client (client null-checks and
// fallback returns elided in this listing).
623 MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
626     return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
630 void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
633     m_client->sourceBufferPrivateSeekToTime(this, time);
// Called from the renderers' ready-for-data blocks: stop the standing
// request for |trackID| and tell the client more samples can be enqueued.
// NOTE(review): an "else" before ASSERT_NOT_REACHED (original line 642) and
// a client null-check appear elided in this listing.
636 void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
638     if (trackID == m_enabledVideoTrackID)
639         [m_displayLayer stopRequestingMediaData];
640     else if (m_audioRenderers.count(trackID))
641         [m_audioRenderers[trackID] stopRequestingMediaData];
643         ASSERT_NOT_REACHED();
648     m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
// Re-arms the ready-for-data callback on the renderer owning |trackIDString|
// so didBecomeReadyForMoreSamples fires when it can accept more input.
// NOTE(review): the blocks capture |this| implicitly via the member call —
// object lifetime must cover the registered callback; confirm teardown order.
651 void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
653     int trackID = trackIDString.toInt();
654     if (trackID == m_enabledVideoTrackID) {
655         [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
656             didBecomeReadyForMoreSamples(trackID);
658     } else if (m_audioRenderers.count(trackID)) {
659         [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
660             didBecomeReadyForMoreSamples(trackID);
663         ASSERT_NOT_REACHED();
668 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)