Avoid copy-prone idiom "for (auto item : collection)"
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / SourceBufferPrivateAVFObjC.mm
1 /*
2  * Copyright (C) 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "SourceBufferPrivateAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "ExceptionCodePlaceholder.h"
32 #import "Logging.h"
33 #import "MediaDescription.h"
34 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35 #import "MediaSample.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaTimeMac.h"
38 #import "NotImplemented.h"
39 #import "SoftLinking.h"
40 #import "SourceBufferPrivateClient.h"
41 #import "TimeRanges.h"
42 #import "AudioTrackPrivateMediaSourceAVFObjC.h"
43 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
#import <AVFoundation/AVAssetTrack.h>
#import <CoreFoundation/CFByteOrder.h>
#import <CoreMedia/CMSampleBuffer.h>
#import <QuartzCore/CALayer.h>
#import <objc/runtime.h>
#import <wtf/text/AtomicString.h>
#import <wtf/text/CString.h>
#import <wtf/HashCountedSet.h>
#import <wtf/WeakPtr.h>
#import <map>
54
55 #pragma mark -
56 #pragma mark Soft Linking
57
58 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
59 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
60
61 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
62 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
63 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
64 SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
65
66 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
67 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
68 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
69
70 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
71 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
72 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
73 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
74 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
75 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
76 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
77 SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
80 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
81 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
82
83 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
84 SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
85 SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
86 SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
87 SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
88 SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
89 SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
90 SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
91 SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
92 SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
93
94 #define AVMediaTypeVideo getAVMediaTypeVideo()
95 #define AVMediaTypeAudio getAVMediaTypeAudio()
96 #define AVMediaTypeText getAVMediaTypeText()
97 #define kCMTimeZero getkCMTimeZero()
98 #define kCMTimeInvalid getkCMTimeInvalid()
99 #define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
100 #define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
101 #define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
102 #define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
103 #define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
104 #define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
105
106 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
107 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
108 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
109
110 #pragma mark -
111 #pragma mark AVStreamDataParser
112
// Local declaration of the private AVStreamDataParser SPI so this file compiles
// without the private AVFoundation headers; the concrete class is soft-linked
// at runtime via getAVStreamDataParserClass().
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
- (void)processContentKeyResponseData:(NSData *)contentKeyResponseData forTrackID:(CMPersistentTrackID)trackID;
- (void)processContentKeyResponseError:(NSError *)error forTrackID:(CMPersistentTrackID)trackID;
- (void)renewExpiringContentKeyResponseDataForTrackID:(CMPersistentTrackID)trackID;
- (NSData *)streamingContentKeyRequestDataForApp:(NSData *)appIdentifier contentIdentifier:(NSData *)contentIdentifier trackID:(CMPersistentTrackID)trackID options:(NSDictionary *)options error:(NSError **)outError;
@end
123
124 #pragma mark -
125 #pragma mark AVSampleBufferDisplayLayer
126
// Local declaration of the (then-private) AVSampleBufferDisplayLayer SPI used
// to render decoded video samples; the concrete class is soft-linked at runtime.
@interface AVSampleBufferDisplayLayer : CALayer
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
136
137 #pragma mark -
138 #pragma mark AVSampleBufferAudioRenderer
139
// Local declaration of the private AVSampleBufferAudioRenderer SPI used to
// render decoded audio samples; mirrors the display-layer enqueue interface.
@interface AVSampleBufferAudioRenderer : NSObject
- (NSInteger)status;
- (NSError*)error;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)flush;
- (BOOL)isReadyForMoreMediaData;
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)stopRequestingMediaData;
@end
149
150 #pragma mark -
151 #pragma mark WebAVStreamDataParserListener
152
// Bridges AVStreamDataParser delegate callbacks to the owning C++
// SourceBufferPrivateAVFObjC. _parent is a raw, unretained pointer: the parent
// owns both the parser and this listener, so it is expected to outlive both —
// NOTE(review): nothing nulls _parent out if that invariant breaks; confirm
// against the parent's destruction order.
@interface WebAVStreamDataParserListener : NSObject {
    WebCore::SourceBufferPrivateAVFObjC* _parent;
    AVStreamDataParser* _parser;
}
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
@end
159
@implementation WebAVStreamDataParserListener
- (id)initWithParser:(AVStreamDataParser*)parser parent:(WebCore::SourceBufferPrivateAVFObjC*)parent
{
    self = [super init];
    if (!self)
        return nil;

    ASSERT(parent);
    _parent = parent;
    // _parser is not retained; the parent's RetainPtr keeps it alive for our lifetime.
    _parser = parser;
    [_parser setDelegate:self];
    return self;
}

// This file is compiled without ARC (WebKit MRC), so the explicit
// [super dealloc] is required.
- (void)dealloc
{
    // Detach from the parser so it cannot message a deallocated delegate.
    [_parser setDelegate:nil];
    [super dealloc];
}

// Each delegate method below asserts it was called by our own parser, then
// forwards the payload straight to the C++ parent.
- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
{
    // Discontinuity information is intentionally ignored; both variants funnel
    // into the same parent handler.
    UNUSED_PARAM(discontinuity);
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didParseStreamDataAsAsset(asset);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didFailToParseStreamDataWithError(error);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)mediaData forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType flags:(NSUInteger)flags
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didProvideMediaDataForTrackID(trackID, mediaData, mediaType, flags);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)mediaType
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
}

- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
{
#if ASSERT_DISABLED
    UNUSED_PARAM(streamDataParser);
#endif
    ASSERT(streamDataParser == _parser);
    _parent->didProvideContentKeyRequestInitializationDataForTrackID(initData, trackID);
}
@end
235
236 namespace WebCore {
237
238 #pragma mark -
239 #pragma mark MediaSampleAVFObjC
240
// Thin MediaSample wrapper around a retained CMSampleBuffer. Timing accessors
// delegate directly to the soft-linked CoreMedia getters; the track ID is kept
// as a decimal string to match SourceBufferPrivateClient's AtomicString API.
class MediaSampleAVFObjC final : public MediaSample {
public:
    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
    virtual ~MediaSampleAVFObjC() { }

    virtual MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
    virtual MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
    virtual MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
    virtual AtomicString trackID() const override { return m_id; }

    virtual SampleFlags flags() const override;
    virtual PlatformSample platformSample() override;

protected:
    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
        : m_sample(sample) // RetainPtr construction retains the buffer.
        , m_id(String::format("%d", trackID))
    {
    }

    RetainPtr<CMSampleBufferRef> m_sample;
    AtomicString m_id;
};
264
// Exposes the retained CMSampleBuffer through the engine-neutral PlatformSample union.
PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample platformSample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return platformSample;
}
270
// Returns true when no sample in the buffer carries the NotSync attachment,
// i.e. every sample is a random-access (sync) sample.
static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
{
    // Without an attachments array no sample can be marked NotSync.
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    CFIndex attachmentCount = CFArrayGetCount(attachments);
    for (CFIndex index = 0; index < attachmentCount; ++index) {
        CFDictionaryRef sampleAttachments = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, index);
        if (CFDictionaryContainsKey(sampleAttachments, kCMSampleAttachmentKey_NotSync))
            return false;
    }
    return true;
}
284
// Only the IsSync bit is derived here; no other sample flags are produced by this port.
MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    if (CMSampleBufferIsRandomAccess(m_sample.get()))
        return SampleFlags(MediaSample::None | MediaSample::IsSync);
    return MediaSample::None;
}
294
295 #pragma mark -
296 #pragma mark MediaDescriptionAVFObjC
297
// MediaDescription backed by an AVAssetTrack: kind flags come from the track's
// media characteristics, and the codec string from the first format description.
class MediaDescriptionAVFObjC final : public MediaDescription {
public:
    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
    virtual ~MediaDescriptionAVFObjC() { }

    virtual AtomicString codec() const override { return m_codec; }
    virtual bool isVideo() const override { return m_isVideo; }
    virtual bool isAudio() const override { return m_isAudio; }
    virtual bool isText() const override { return m_isText; }

protected:
    MediaDescriptionAVFObjC(AVAssetTrack* track)
        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
    {
        NSArray* formatDescriptions = [track formatDescriptions];
        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
        if (description) {
            // CMFormatDescriptionGetMediaSubType() returns the FourCC in host
            // byte order; swap to big-endian so the in-memory bytes spell the
            // codec name (e.g. 'avc1') on little-endian hardware instead of
            // its reversal.
            FourCharCode codec = CFSwapInt32HostToBig(CMFormatDescriptionGetMediaSubType(description));
            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
        }
    }

    AtomicString m_codec;
    bool m_isVideo;
    bool m_isAudio;
    bool m_isText;
};
327
328 #pragma mark -
329 #pragma mark SourceBufferPrivateAVFObjC
330
// Factory helper; adoptRef() takes ownership of the freshly allocated buffer.
RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
{
    RefPtr<SourceBufferPrivateAVFObjC> sourceBuffer = adoptRef(new SourceBufferPrivateAVFObjC(parent));
    return sourceBuffer;
}
335
// Creates the soft-linked stream parser and the ObjC listener that forwards its
// delegate callbacks back to this object. -1 marks "no track" sentinels.
SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
    : m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:this]))
    , m_mediaSource(parent)
    , m_client(0)
    , m_parsingSucceeded(true)
    , m_enabledVideoTrackID(-1)
    , m_protectedTrackID(-1)
{
}
346
// Tears down the display layer and audio renderers. NOTE(review): the delegate
// listener still holds a raw pointer to this object; the parser must not issue
// further callbacks after destruction — confirm against parser teardown order.
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
    destroyRenderers();
}
351
// Parser callback: an initialization segment was parsed into an AVAsset.
// Rebuilds the track lists and reports an InitializationSegment to the client.
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
{
    LOG(Media, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);

    m_asset = asset;

    // Each init segment fully replaces the previously advertised tracks.
    m_videoTracks.clear();
    m_audioTracks.clear();

    SourceBufferPrivateClient::InitializationSegment segment;
    segment.duration = toMediaTime([m_asset duration]);

    for (AVAssetTrack* track in [m_asset tracks]) {
        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = videoTrack;
            m_videoTracks.append(videoTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.videoTracks.append(info);
        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
            info.track = audioTrack;
            m_audioTracks.append(audioTrack);
            info.description = MediaDescriptionAVFObjC::create(track);
            segment.audioTracks.append(info);
        }

        // FIXME(125161): Add TextTrack support
    }

    // A new video track can change the natural size reported by the player.
    if (!m_videoTracks.isEmpty())
        m_mediaSource->player()->sizeChanged();

    if (m_client)
        m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
}
390
// Parser callback: appended bytes could not be parsed. append() inspects
// m_parsingSucceeded after the (synchronous) parse to report ParsingFailed.
void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
{
#if LOG_DISABLED
    UNUSED_PARAM(error);
#endif
    LOG(Media, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());

    m_parsingSucceeded = false;
}
400
// Context record for sample-by-sample processing callbacks.
// NOTE(review): unused in this chunk — didProvideMediaDataForTrackID calls
// processCodedFrame directly. The const String& member means an instance must
// never outlive the string it was built from; confirm before reusing.
struct ProcessCodedFrameInfo {
    SourceBufferPrivateAVFObjC* sourceBuffer;
    int trackID;
    const String& mediaType;
};
406
// Parser callback: a coded media sample arrived for trackID. The flags
// argument is currently unused; the sample is handed straight to
// processCodedFrame().
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
{
    UNUSED_PARAM(flags);
    processCodedFrame(trackID, sampleBuffer, mediaType);
}
413
// Wraps the CMSampleBuffer in a MediaSampleAVFObjC and delivers it to the
// client, if any. Always reports success.
bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
{
    if (!m_client)
        return true;

    RefPtr<MediaSampleAVFObjC> sample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
    m_client->sourceBufferPrivateDidReceiveSample(this, sample);
    return true;
}
421
// Parser callback: end of a track was reached. Not yet handled by this port.
void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
{
    UNUSED_PARAM(trackID);
    UNUSED_PARAM(mediaType);
    notImplemented();
}
428
// Parser callback: encrypted content needs a key. With ENCRYPTED_MEDIA_V2 the
// init data is copied into a Uint8Array and surfaced via sourceBufferKeyNeeded;
// otherwise the notification is dropped.
void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID)
{
    UNUSED_PARAM(trackID);
#if ENABLE(ENCRYPTED_MEDIA_V2)
    LOG(Media, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
    // Remember which track the key request belongs to for later key delivery.
    m_protectedTrackID = trackID;
    RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
    [initData getBytes:initDataArray->data() length:initDataArray->length()];
    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
#else
    UNUSED_PARAM(initData);
#endif
}
442
// Registers the SourceBufferPrivateClient (not owned) that receives parsed
// segments and samples; pass null to detach.
void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* newClient)
{
    m_client = newClient;
}
447
// Feeds raw bytes to the stream parser. Parsing is synchronous: delegate
// callbacks fire during appendStreamData:, and a failure callback clears
// m_parsingSucceeded before we read it below.
SourceBufferPrivate::AppendResult SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
{
    m_parsingSucceeded = true;

    LOG(Media, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
    NSData* streamData = [NSData dataWithBytes:data length:length];
    [m_parser appendStreamData:streamData];

    if (!m_parsingSucceeded)
        return ParsingFailed;

    if (m_mediaSource)
        m_mediaSource->player()->setLoadingProgresssed(true);

    return AppendSucceeded;
}
460
// MSE abort() — resetting the parser state is not yet implemented for this port.
void SourceBufferPrivateAVFObjC::abort()
{
    notImplemented();
}
465
// Flushes and releases the display layer and every audio renderer, detaching
// each from the player first so it stops being driven.
void SourceBufferPrivateAVFObjC::destroyRenderers()
{
    if (m_displayLayer) {
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
        [m_displayLayer flush];
        [m_displayLayer stopRequestingMediaData];
        m_displayLayer = nullptr;
    }

    // Iterate by reference: copying the pair would needlessly retain/release
    // each RetainPtr'd renderer.
    for (auto& trackIDAndRenderer : m_audioRenderers) {
        AVSampleBufferAudioRenderer* renderer = trackIDAndRenderer.second.get();
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer);
        [renderer flush];
        [renderer stopRequestingMediaData];
    }

    m_audioRenderers.clear();
}
486
// Called when the SourceBuffer is removed from its MediaSource: tear down the
// renderers, then unregister from the owning media source.
void SourceBufferPrivateAVFObjC::removedFromMediaSource()
{
    destroyRenderers();

    if (!m_mediaSource)
        return;
    m_mediaSource->removeSourceBuffer(this);
}
494
// Proxies the player's ready state; HaveNothing once detached from the source.
MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
{
    if (!m_mediaSource)
        return MediaPlayer::HaveNothing;
    return m_mediaSource->player()->readyState();
}
499
// Forwards a ready-state change to the player; no-op once detached.
void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (!m_mediaSource)
        return;
    m_mediaSource->player()->setReadyState(readyState);
}
505
// Coded-frame eviction is not yet implemented for this port.
void SourceBufferPrivateAVFObjC::evictCodedFrames()
{
    notImplemented();
}
510
// Buffer-full detection is not yet implemented; report "not full" so appends
// are never rejected on capacity grounds.
bool SourceBufferPrivateAVFObjC::isFull()
{
    notImplemented();
    return false;
}
516
517
// True when the client reports buffered video; false with no client attached.
bool SourceBufferPrivateAVFObjC::hasVideo() const
{
    return m_client && m_client->sourceBufferPrivateHasVideo(this);
}
525
// True when the client reports buffered audio; false with no client attached.
bool SourceBufferPrivateAVFObjC::hasAudio() const
{
    return m_client && m_client->sourceBufferPrivateHasAudio(this);
}
533
// A video track was selected or deselected. Selection lazily creates the
// display layer and attaches it to the player; deselection stops media-data
// delivery for the track and detaches the layer.
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();
    if (!track->selected() && m_enabledVideoTrackID == trackID) {
        m_enabledVideoTrackID = -1;
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
    } else if (track->selected()) {
        m_enabledVideoTrackID = trackID;
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        if (!m_displayLayer) {
            // adoptNS() balances the +1 from alloc/init; assigning the raw
            // pointer into the RetainPtr would retain again and leak the layer.
            m_displayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
        }
        if (m_mediaSource)
            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
    }
}
555
// An audio track was enabled or disabled. Enabling lazily creates (and caches)
// a per-track audio renderer and attaches it to the player; disabling stops
// media-data delivery and detaches the renderer (it stays cached for re-enable).
void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
    int trackID = track->trackID();

    if (!track->enabled()) {
        AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
        if (m_mediaSource)
            m_mediaSource->player()->removeAudioRenderer(renderer);
    } else {
        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
        RetainPtr<AVSampleBufferAudioRenderer> renderer;
        if (!m_audioRenderers.count(trackID)) {
            // adoptNS() balances the +1 from alloc/init; storing the raw +1
            // pointer into the RetainPtr map would retain again and leak.
            renderer = adoptNS([[getAVSampleBufferAudioRendererClass() alloc] init]);
            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                didBecomeReadyForMoreSamples(trackID);
            }];
            m_audioRenderers[trackID] = renderer;
        } else
            renderer = m_audioRenderers[trackID];

        if (m_mediaSource)
            m_mediaSource->player()->addAudioRenderer(renderer.get());
    }
}
581
// Duplicates sampleBuffer and tags every sample in the copy DoNotDisplay, so it
// can prime the decoder without being rendered. If copying fails, the original
// buffer is returned (retained by the RetainPtr) as a fallback.
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
{
    CMSampleBufferRef copiedBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &copiedBuffer);
    if (!copiedBuffer)
        return sampleBuffer;

    // Pass true so an attachments array is created if the copy lacks one.
    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(copiedBuffer, true);
    CFIndex attachmentCount = CFArrayGetCount(attachmentsArray);
    for (CFIndex index = 0; index < attachmentCount; ++index) {
        CFMutableDictionaryRef sampleAttachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, index);
        CFDictionarySetValue(sampleAttachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    return adoptCF(copiedBuffer);
}
597
// Dispatches a flush-and-prime request to the renderer owning trackID: the
// display layer for the enabled video track, or the matching audio renderer.
// Unknown track IDs are ignored.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    // size() is unsigned (size_t); cast so the %d conversion is well-defined on 64-bit builds.
    LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, static_cast<int>(mediaSamples.size()), trackID);

    if (trackID == m_enabledVideoTrackID)
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
    else if (m_audioRenderers.count(trackID))
        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
}
608
// Flushes the audio renderer, then enqueues DoNotDisplay copies of the given
// samples to re-prime the decoder after a seek.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
{
    [renderer flush];

    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [renderer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }
}
624
// Flushes the display layer, enqueues DoNotDisplay copies of the given samples
// to re-prime the video decoder, and marks the player as having no displayable
// frame until a real sample is enqueued.
void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
{
    [layer flush];

    for (auto& mediaSample : mediaSamples) {
        PlatformSample platformSample = mediaSample->platformSample();
        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);

        RetainPtr<CMSampleBufferRef> nonDisplayingBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
        [layer enqueueSampleBuffer:nonDisplayingBuffer.get()];
    }

    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableVideoFrame(false);
}
643
// Hands a displayable sample to the renderer owning trackID. Samples for
// unknown tracks or with non-CMSampleBuffer payloads are dropped.
void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    bool isEnabledVideoTrack = (trackID == m_enabledVideoTrackID);
    if (!isEnabledVideoTrack && !m_audioRenderers.count(trackID))
        return;

    RefPtr<MediaSample> mediaSample = prpMediaSample;

    PlatformSample platformSample = mediaSample->platformSample();
    if (platformSample.type != PlatformSample::CMSampleBufferType)
        return;

    if (!isEnabledVideoTrack) {
        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
        return;
    }

    [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
    // A real (displayable) video sample is now queued for presentation.
    if (m_mediaSource)
        m_mediaSource->player()->setHasAvailableVideoFrame(true);
}
663
// Asks the renderer owning trackID whether it can accept more sample buffers.
// An unknown track ID is a caller bug (asserted) and reports not-ready.
bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    if (trackID == m_enabledVideoTrackID)
        return [m_displayLayer isReadyForMoreMediaData];

    if (m_audioRenderers.count(trackID))
        return [m_audioRenderers[trackID] isReadyForMoreMediaData];

    ASSERT_NOT_REACHED();
    return false;
}
676
// Reports active-state changes to the owning media source; no-op once detached.
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
{
    if (!m_mediaSource)
        return;
    m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
}
682
// Delegates the fast-seek target calculation to the client; without a client
// the requested time is returned unchanged.
MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
{
    if (!m_client)
        return time;
    return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
}
689
// Forwards a seek request to the client; no-op without one.
void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
{
    if (!m_client)
        return;
    m_client->sourceBufferPrivateSeekToTime(this, time);
}
695
// Returns the natural size of the currently selected video track, or an empty
// size when no video track is selected.
IntSize SourceBufferPrivateAVFObjC::naturalSize()
{
    for (auto& track : m_videoTracks) {
        if (!track->selected())
            continue;
        return track->naturalSize();
    }

    return IntSize();
}
705
// Renderer callback (main queue): trackID's renderer can accept more data.
// Stop the standing request so it fires once, then notify the client, which
// re-arms via notifyClientWhenReadyForMoreSamples when it wants another signal.
void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
{
    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer stopRequestingMediaData];
    else if (m_audioRenderers.count(trackID))
        [m_audioRenderers[trackID] stopRequestingMediaData];
    else {
        ASSERT_NOT_REACHED();
        return;
    }

    if (!m_client)
        return;
    m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
}
720
// Arms a one-shot ready-for-data notification on trackID's renderer; when it
// fires (on the main queue) didBecomeReadyForMoreSamples cancels the request
// and informs the client. An unknown track ID is a caller bug (asserted).
void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
{
    int trackID = trackIDString.toInt();
    void (^readyHandler)(void) = ^{
        didBecomeReadyForMoreSamples(trackID);
    };

    if (trackID == m_enabledVideoTrackID)
        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:readyHandler];
    else if (m_audioRenderers.count(trackID))
        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:readyHandler];
    else
        ASSERT_NOT_REACHED();
}
735
736 }
737
738 #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)