Source/WebCore/platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.mm
/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "ImageDecoderAVFObjC.h"

#if HAVE(AVASSETREADER)

#import "AVFoundationMIMETypeCache.h"
#import "AffineTransform.h"
#import "ContentType.h"
#import "FloatQuad.h"
#import "FloatRect.h"
#import "FloatSize.h"
#import "Logging.h"
#import "MIMETypeRegistry.h"
#import "MediaSampleAVFObjC.h"
#import "SharedBuffer.h"
#import "UTIUtilities.h"
#import "WebCoreDecompressionSession.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetResourceLoader.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVTime.h>
#import <VideoToolbox/VTUtilities.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <wtf/MainThread.h>
#import <wtf/MediaTime.h>
#import <wtf/NeverDestroyed.h>
#import <wtf/Optional.h>
#import <wtf/Vector.h>

#import "CoreVideoSoftLink.h"
#import "VideoToolboxSoftLink.h"
#import <pal/cf/CoreMediaSoftLink.h>
#import <pal/cocoa/AVFoundationSoftLink.h>

#pragma mark -

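// WebCoreSharedBufferResourceLoaderDelegate vends the in-memory SharedBuffer contents to
// AVFoundation through the AVAssetResourceLoader machinery, queuing loading requests that
// cannot yet be satisfied and fulfilling them as more data arrives.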
@interface WebCoreSharedBufferResourceLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WebCore::ImageDecoderAVFObjC* _parent;
    long long _expectedContentSize;
    RetainPtr<NSData> _data;
    bool _complete;
    Vector<RetainPtr<AVAssetResourceLoadingRequest>> _requests;
    Lock _dataLock;
}
@property (readonly) NSData* data;
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent;
- (void)setExpectedContentSize:(long long)expectedContentSize;
- (void)updateData:(NSData *)data complete:(BOOL)complete;
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)fulfillPendingRequests;
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
@end

@implementation WebCoreSharedBufferResourceLoaderDelegate
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;

    return self;
}

- (NSData*)data
{
    return _data.get();
}

- (void)setExpectedContentSize:(long long)expectedContentSize
{
    LockHolder holder { _dataLock };
    _expectedContentSize = expectedContentSize;

    [self fulfillPendingRequests];
}

- (void)updateData:(NSData *)data complete:(BOOL)complete
{
    LockHolder holder { _dataLock };
    _data = data;
    _complete = complete;

    [self fulfillPendingRequests];
}

- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (!request)
        return NO;

    if (request.finished || request.cancelled)
        return NO;

    // AVURLAsset's resource loader must know the expected content size of the resource in
    // order to load successfully, so a request can be fulfilled only once we have either the
    // complete data or an explicit expected content size.
    if (!_complete && !_expectedContentSize)
        return NO;

    if (auto dataRequest = request.dataRequest) {
        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
            return NO;
    }

    return YES;
}

- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    ASSERT(!_requests.contains(loadingRequest));
    _requests.append(loadingRequest);
}

- (void)fulfillPendingRequests
{
    for (auto& request : _requests) {
        if ([self canFulfillRequest:request.get()])
            [self fulfillRequest:request.get()];
    }

    _requests.removeAllMatching([] (auto& request) {
        return request.get().finished;
    });
}

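// Answers a loading request's content-information and data requests from the buffered NSData,
// responding with as many bytes as are currently available and only finishing the request once
// it has been fully satisfied.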
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (auto infoRequest = request.contentInformationRequest) {
        infoRequest.contentType = _parent->uti();
        infoRequest.byteRangeAccessSupported = YES;
        infoRequest.contentLength = _complete ? _data.get().length : _expectedContentSize;
    }

    if (auto dataRequest = request.dataRequest) {
        long long availableLength = _data.get().length - dataRequest.requestedOffset;
        if (availableLength <= 0)
            return;

        long long requestedLength;
        if (dataRequest.requestsAllDataToEndOfResource)
            requestedLength = availableLength;
        else
            requestedLength = std::min<long long>(availableLength, dataRequest.requestedLength);

        auto range = NSMakeRange(static_cast<NSUInteger>(dataRequest.requestedOffset), static_cast<NSUInteger>(requestedLength));
        NSData* requestedData = [_data subdataWithRange:range];
        if (!requestedData)
            return;

        [dataRequest respondWithData:requestedData];

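        // Leave the request pending unless every byte it asked for has been delivered: a
        // to-end-of-resource request needs the complete resource, and otherwise the response
        // must have advanced currentOffset past the end of the requested range.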
        if (dataRequest.requestsAllDataToEndOfResource) {
            if (!_complete)
                return;
        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
            return;
    }

    [request finishLoading];
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);

    if ([self canFulfillRequest:loadingRequest]) {
        [self fulfillRequest:loadingRequest];
        if (loadingRequest.finished)
            return YES;
    }

    [self enqueueRequest:loadingRequest];
    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);
    _requests.removeAll(loadingRequest);
}
@end

namespace WebCore {

#pragma mark - Static Methods

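// All loads use a fixed URL with a custom scheme, which AVFoundation cannot fetch on its own;
// every request is therefore routed through the resource loader delegate above.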
static NSURL *customSchemeURL()
{
    static NSURL *url = [[NSURL alloc] initWithString:@"custom-imagedecoderavfobjc://resource"];
    return url;
}

static NSDictionary *imageDecoderAssetOptions()
{
    static NSDictionary *options = [] {
        return [@{
            AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
            AVURLAssetUsesNoPersistentCacheKey: @YES,
        } retain];
    }();
    return options;
}

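// Decomposes the track's affine transform into flips plus a rotation angle that the decoder
// can later apply with a VTImageRotationSession. Only right-angle rotations are representable.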
static ImageDecoderAVFObjC::RotationProperties transformToRotationProperties(AffineTransform inTransform)
{
    ImageDecoderAVFObjC::RotationProperties rotation;
    if (inTransform.isIdentity())
        return rotation;

    AffineTransform::DecomposedType decomposed { };
    if (!inTransform.decompose(decomposed))
        return rotation;

    rotation.flipY = WTF::areEssentiallyEqual(decomposed.scaleX, -1.);
    rotation.flipX = WTF::areEssentiallyEqual(decomposed.scaleY, -1.);
    auto degrees = rad2deg(decomposed.angle);
    while (degrees < 0)
        degrees += 360;

    // Only support rotation in multiples of 90°:
    if (WTF::areEssentiallyEqual(fmod(degrees, 90.), 0.))
        rotation.angle = clampToUnsigned(degrees);

    return rotation;
}

class ImageDecoderAVFObjCSample : public MediaSampleAVFObjC {
public:
    static Ref<ImageDecoderAVFObjCSample> create(RetainPtr<CMSampleBufferRef>&& sampleBuffer)
    {
        return adoptRef(*new ImageDecoderAVFObjCSample(WTFMove(sampleBuffer)));
    }

    CGImageRef image() const { return m_image.get(); }
    void setImage(RetainPtr<CGImageRef>&& image)
    {
        m_image = WTFMove(image);
        if (!m_image) {
            m_hasAlpha = false;
            return;
        }

        auto alphaInfo = CGImageGetAlphaInfo(m_image.get());
        m_hasAlpha = alphaInfo != kCGImageAlphaNone && alphaInfo != kCGImageAlphaNoneSkipLast && alphaInfo != kCGImageAlphaNoneSkipFirst;
    }

    struct ByteRange {
        size_t byteOffset { 0 };
        size_t byteLength { 0 };
    };

    Optional<ByteRange> byteRange() const
    {
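        // A sample reference carries no payload of its own: it has neither a data buffer nor
        // an image buffer, its data is not ready, and its location in the original resource is
        // recorded in the SampleReferenceByteOffset attachment plus a single sample size entry.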
        if (PAL::CMSampleBufferGetDataBuffer(m_sample.get())
            || PAL::CMSampleBufferGetImageBuffer(m_sample.get())
            || !PAL::CMSampleBufferDataIsReady(m_sample.get()))
            return WTF::nullopt;

        CFNumberRef byteOffsetCF = (CFNumberRef)PAL::CMGetAttachment(m_sample.get(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset, nullptr);
        if (!byteOffsetCF || CFGetTypeID(byteOffsetCF) != CFNumberGetTypeID())
            return WTF::nullopt;

        int64_t byteOffset { 0 };
        if (!CFNumberGetValue(byteOffsetCF, kCFNumberSInt64Type, &byteOffset))
            return WTF::nullopt;

        CMItemCount sizeArrayEntries = 0;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, &sizeArrayEntries);
        if (sizeArrayEntries != 1)
            return WTF::nullopt;

        size_t singleSizeEntry;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 1, &singleSizeEntry, nullptr);
        return {{static_cast<size_t>(byteOffset), singleSizeEntry}};
    }

    SampleFlags flags() const override
    {
        return (SampleFlags)(MediaSampleAVFObjC::flags() | (m_hasAlpha ? HasAlpha : 0));
    }

private:
    ImageDecoderAVFObjCSample(RetainPtr<CMSampleBufferRef>&& sample)
        : MediaSampleAVFObjC(WTFMove(sample))
    {
    }

    RetainPtr<CGImageRef> m_image;
    bool m_hasAlpha { false };
};

static ImageDecoderAVFObjCSample* toSample(const PresentationOrderSampleMap::value_type& pair)
{
    return (ImageDecoderAVFObjCSample*)pair.second.get();
}

template <typename Iterator>
ImageDecoderAVFObjCSample* toSample(Iterator iter)
{
    return (ImageDecoderAVFObjCSample*)iter->second.get();
}

#pragma mark - ImageDecoderAVFObjC

RefPtr<ImageDecoderAVFObjC> ImageDecoderAVFObjC::create(SharedBuffer& data, const String& mimeType, AlphaOption alphaOption, GammaAndColorProfileOption gammaAndColorProfileOption)
{
    // AVFoundation may not be available at runtime.
    if (!AVFoundationMIMETypeCache::singleton().isAvailable())
        return nullptr;

    if (!canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
        return nullptr;

    return adoptRef(*new ImageDecoderAVFObjC(data, mimeType, alphaOption, gammaAndColorProfileOption));
}

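// The constructor kicks off an asynchronous load of the asset's tracks; track selection and
// sample reading happen later on the main thread, once AVFoundation has parsed the data that
// the resource loader delegate vends.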
ImageDecoderAVFObjC::ImageDecoderAVFObjC(SharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
    : ImageDecoder()
    , m_mimeType(mimeType)
    , m_uti(WebCore::UTIFromMIMEType(mimeType))
    , m_asset(adoptNS([PAL::allocAVURLAssetInstance() initWithURL:customSchemeURL() options:imageDecoderAssetOptions()]))
    , m_loader(adoptNS([[WebCoreSharedBufferResourceLoaderDelegate alloc] initWithParent:this]))
    , m_decompressionSession(WebCoreDecompressionSession::createRGB())
{
    [m_loader updateData:data.createNSData().get() complete:NO];

    [m_asset.get().resourceLoader setDelegate:m_loader.get() queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    [m_asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->setTrack(protectedThis->firstEnabledTrack());
        });
    }];
}

ImageDecoderAVFObjC::~ImageDecoderAVFObjC() = default;

bool ImageDecoderAVFObjC::supportsMediaType(MediaType type)
{
    return type == MediaType::Video && AVFoundationMIMETypeCache::singleton().isAvailable();
}

bool ImageDecoderAVFObjC::supportsContentType(const ContentType& type)
{
    return AVFoundationMIMETypeCache::singleton().supportsContentType(type);
}

bool ImageDecoderAVFObjC::canDecodeType(const String& mimeType)
{
    return AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType);
}

AVAssetTrack *ImageDecoderAVFObjC::firstEnabledTrack()
{
    NSArray<AVAssetTrack *> *videoTracks = [m_asset tracksWithMediaCharacteristic:AVMediaCharacteristicVisual];
    NSUInteger firstEnabledIndex = [videoTracks indexOfObjectPassingTest:^(AVAssetTrack *track, NSUInteger, BOOL*) {
        return track.enabled;
    }];

    if (firstEnabledIndex == NSNotFound) {
        LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
        return nil;
    }

    return [videoTracks objectAtIndex:firstEnabledIndex];
}

void ImageDecoderAVFObjC::readSamples()
{
    if (!m_sampleData.empty())
        return;

    auto assetReader = adoptNS([PAL::allocAVAssetReaderInstance() initWithAsset:m_asset.get() error:nil]);
    auto referenceOutput = adoptNS([PAL::allocAVAssetReaderSampleReferenceOutputInstance() initWithTrack:m_track.get()]);

    referenceOutput.get().alwaysCopiesSampleData = NO;
    [assetReader addOutput:referenceOutput.get()];
    [assetReader startReading];

    while (auto sampleBuffer = adoptCF([referenceOutput copyNextSampleBuffer])) {
        // NOTE: Some samples emitted by the AVAssetReader simply denote the boundary of edits
        // and do not carry media data.
        if (!(PAL::CMSampleBufferGetNumSamples(sampleBuffer.get())))
            continue;
        m_sampleData.addSample(ImageDecoderAVFObjCSample::create(WTFMove(sampleBuffer)).get());
    }

    if (m_encodedDataStatusChangedCallback)
        m_encodedDataStatusChangedCallback(encodedDataStatus());
}

void ImageDecoderAVFObjC::readTrackMetadata()
{
    if (!m_rotation)
        m_rotation = transformToRotationProperties(CGAffineTransformConcat(m_asset.get().preferredTransform, m_track.get().preferredTransform));

    if (!m_size) {
        auto size = FloatSize(m_track.get().naturalSize);
        auto angle = m_rotation.value().angle;
        if (angle == 90 || angle == 270)
            size = size.transposedSize();

        m_size = expandedIntSize(size);
    }
}

bool ImageDecoderAVFObjC::storeSampleBuffer(CMSampleBufferRef sampleBuffer)
{
    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    if (!pixelBuffer) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
        return false;
    }

    auto presentationTime = PAL::toMediaTime(PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    auto iter = m_sampleData.presentationOrder().findSampleWithPresentationTime(presentationTime);

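    // If the track carries a non-identity transform, run the decoded pixel buffer through a
    // lazily created VTImageRotationSession (backed by a matching CVPixelBufferPool) so the
    // stored image is already upright.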
    if (m_rotation && !m_rotation.value().isIdentity()) {
        auto& rotation = m_rotation.value();
        if (!m_rotationSession) {
            VTImageRotationSessionRef rawRotationSession = nullptr;
            VTImageRotationSessionCreate(kCFAllocatorDefault, rotation.angle, &rawRotationSession);
            m_rotationSession = rawRotationSession;
            VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_EnableHighSpeedTransfer, kCFBooleanTrue);

            if (rotation.flipY)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipVerticalOrientation, kCFBooleanTrue);
            if (rotation.flipX)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipHorizontalOrientation, kCFBooleanTrue);
        }

        if (!m_rotationPool) {
            auto pixelAttributes = @{
                (__bridge NSString *)kCVPixelBufferWidthKey: @(m_size.value().width()),
                (__bridge NSString *)kCVPixelBufferHeightKey: @(m_size.value().height()),
                (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
                (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
            };
            CVPixelBufferPoolRef rawPool = nullptr;
            CVPixelBufferPoolCreate(kCFAllocatorDefault, nullptr, (__bridge CFDictionaryRef)pixelAttributes, &rawPool);
            m_rotationPool = adoptCF(rawPool);
        }

        CVPixelBufferRef rawRotatedBuffer = nullptr;
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_rotationPool.get(), &rawRotatedBuffer);
        auto status = VTImageRotationSessionTransferImage(m_rotationSession.get(), pixelBuffer.get(), rawRotatedBuffer);
        if (status == noErr)
            pixelBuffer = adoptCF(rawRotatedBuffer);
    }

    CGImageRef rawImage = nullptr;
    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
        return false;
    }

    ASSERT(iter != m_sampleData.presentationOrder().end());
    toSample(iter)->setImage(adoptCF(rawImage));

    return true;
}

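// The cursor tracks the next sample to decode in decode order, wrapping back to the beginning
// once it reaches (or already sits at) the end of the sample map.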
void ImageDecoderAVFObjC::advanceCursor()
{
    if (m_cursor == m_sampleData.decodeOrder().end() || ++m_cursor == m_sampleData.decodeOrder().end())
        m_cursor = m_sampleData.decodeOrder().begin();
}

void ImageDecoderAVFObjC::setTrack(AVAssetTrack *track)
{
    if (m_track == track)
        return;
    m_track = track;

    LockHolder holder { m_sampleGeneratorLock };
    m_sampleData.clear();
    m_size.reset();
    m_rotation.reset();
    m_cursor = m_sampleData.decodeOrder().end();
    m_rotationSession = nullptr;

    [track loadValuesAsynchronouslyForKeys:@[@"naturalSize", @"preferredTransform"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->readTrackMetadata();
            protectedThis->readSamples();
        });
    }];
}

void ImageDecoderAVFObjC::setEncodedDataStatusChangeCallback(WTF::Function<void(EncodedDataStatus)>&& callback)
{
    m_encodedDataStatusChangedCallback = WTFMove(callback);
}

EncodedDataStatus ImageDecoderAVFObjC::encodedDataStatus() const
{
    if (!m_sampleData.empty())
        return EncodedDataStatus::Complete;
    if (m_size)
        return EncodedDataStatus::SizeAvailable;
    if (m_track)
        return EncodedDataStatus::TypeAvailable;
    return EncodedDataStatus::Unknown;
}

IntSize ImageDecoderAVFObjC::size() const
{
    if (m_size)
        return m_size.value();
    return IntSize();
}

size_t ImageDecoderAVFObjC::frameCount() const
{
    return m_sampleData.size();
}

RepetitionCount ImageDecoderAVFObjC::repetitionCount() const
{
    // In the absence of instructions to the contrary, assume all media formats repeat infinitely.
    // FIXME: Future media formats may embed repeat count information, and when that is available
    // through AVAsset, account for it here.
    return frameCount() > 1 ? RepetitionCountInfinite : RepetitionCountNone;
}

String ImageDecoderAVFObjC::uti() const
{
    return m_uti;
}

String ImageDecoderAVFObjC::filenameExtension() const
{
    return MIMETypeRegistry::getPreferredExtensionForMIMEType(m_mimeType);
}

IntSize ImageDecoderAVFObjC::frameSizeAtIndex(size_t, SubsamplingLevel) const
{
    return size();
}

bool ImageDecoderAVFObjC::frameIsCompleteAtIndex(size_t index) const
{
    auto* sampleData = sampleAtIndex(index);
    if (!sampleData)
        return false;

    return sampleIsComplete(*sampleData);
}

ImageOrientation ImageDecoderAVFObjC::frameOrientationAtIndex(size_t) const
{
    return ImageOrientation();
}

Seconds ImageDecoderAVFObjC::frameDurationAtIndex(size_t index) const
{
    auto* sampleData = sampleAtIndex(index);
    if (!sampleData)
        return { };

    return Seconds(sampleData->duration().toDouble());
}

bool ImageDecoderAVFObjC::frameHasAlphaAtIndex(size_t index) const
{
    auto* sampleData = sampleAtIndex(index);
    return sampleData ? sampleData->hasAlpha() : false;
}

bool ImageDecoderAVFObjC::frameAllowSubsamplingAtIndex(size_t index) const
{
    return index <= m_sampleData.size();
}

unsigned ImageDecoderAVFObjC::frameBytesAtIndex(size_t index, SubsamplingLevel subsamplingLevel) const
{
    if (!frameIsCompleteAtIndex(index))
        return 0;

    IntSize frameSize = frameSizeAtIndex(index, subsamplingLevel);
    return (frameSize.area() * 4).unsafeGet();
}

NativeImagePtr ImageDecoderAVFObjC::createFrameImageAtIndex(size_t index, SubsamplingLevel, const DecodingOptions&)
{
    LockHolder holder { m_sampleGeneratorLock };

    auto* sampleData = sampleAtIndex(index);
    if (!sampleData)
        return nullptr;

    if (auto image = sampleData->image())
        return image;

    if (m_cursor == m_sampleData.decodeOrder().end())
        m_cursor = m_sampleData.decodeOrder().begin();

    auto decodeTime = sampleData->decodeTime();

    if (decodeTime < m_cursor->second->decodeTime()) {
        // Rewind cursor to the last sync sample to begin decoding.
        m_cursor = m_sampleData.decodeOrder().findSampleWithDecodeKey({decodeTime, sampleData->presentationTime()});
        do {
            if (m_cursor->second->isSync())
                break;
        } while (--m_cursor != m_sampleData.decodeOrder().begin());
    }

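    // Decode forward in decode order until the requested sample's image has been produced;
    // every earlier sample in the dependency chain is decoded (and its image cached) along
    // the way.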
    RetainPtr<CGImageRef> image;
    while (true) {
        if (decodeTime < m_cursor->second->decodeTime())
            return nullptr;

        auto cursorSample = toSample(m_cursor);
        if (!cursorSample)
            return nullptr;

        if (!sampleIsComplete(*cursorSample))
            return nullptr;

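        // A sample reference still points into the original resource: wrap the loader's NSData
        // in a CMBlockBuffer (retaining the data for the buffer's lifetime) and attach it to
        // the sample so it can be decoded like any other sample buffer.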
        if (auto byteRange = cursorSample->byteRange()) {
            auto& byteRangeValue = byteRange.value();
            auto* data = m_loader.get().data;
            CMBlockBufferCustomBlockSource source {
                0,
                nullptr,
                [](void* refcon, void*, size_t) {
                    [(id)refcon release];
                },
                [data retain]
            };
            CMBlockBufferRef rawBlockBuffer = nullptr;
            if (noErr != PAL::CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, const_cast<void*>(data.bytes), data.length, nullptr, &source, byteRangeValue.byteOffset, byteRangeValue.byteLength, 0, &rawBlockBuffer))
                return nullptr;

            if (!rawBlockBuffer)
                return nullptr;

            if (noErr != PAL::CMSampleBufferSetDataBuffer(cursorSample->sampleBuffer(), rawBlockBuffer))
                return nullptr;
            CFRelease(rawBlockBuffer);

            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset);
            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceURL);
        }

        auto cursorSampleBuffer = cursorSample->sampleBuffer();
        if (!cursorSampleBuffer)
            break;

        if (!storeSampleBuffer(cursorSampleBuffer))
            break;

        advanceCursor();
        if (auto image = sampleData->image())
            return image;
    }

    advanceCursor();
    return nullptr;
}

void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
{
    m_loader.get().expectedContentSize = expectedContentSize;
}

void ImageDecoderAVFObjC::setData(SharedBuffer& data, bool allDataReceived)
{
    [m_loader updateData:data.createNSData().get() complete:allDataReceived];

    if (allDataReceived) {
        m_isAllDataReceived = true;

        if (!m_track)
            setTrack(firstEnabledTrack());

        if (!m_track)
            return;

        readTrackMetadata();
        readSamples();
    }
}

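// Releases the decoded images for all frames up to and including the given index so their
// backing memory can be reclaimed; the samples themselves are kept and can be re-decoded.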
void ImageDecoderAVFObjC::clearFrameBufferCache(size_t index)
{
    size_t i = 0;
    for (auto& samplePair : m_sampleData.presentationOrder()) {
        toSample(samplePair)->setImage(nullptr);
        if (++i > index)
            break;
    }
}

const ImageDecoderAVFObjCSample* ImageDecoderAVFObjC::sampleAtIndex(size_t index) const
{
    if (index >= m_sampleData.presentationOrder().size())
        return nullptr;

    // FIXME: std::map is not random-accessible; this can get expensive if callers repeatedly call
    // with monotonically increasing indexes. Investigate adding an O(1) side structure to make this
    // style of access faster.
    auto iter = m_sampleData.presentationOrder().begin();
    for (size_t i = 0; i != index; ++i)
        ++iter;

    return toSample(iter);
}

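// A sample reference is complete once every byte in its range has been buffered; samples that
// already carry their data are complete whenever Core Media reports the data as ready.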
bool ImageDecoderAVFObjC::sampleIsComplete(const ImageDecoderAVFObjCSample& sample) const
{
    if (auto byteRange = sample.byteRange()) {
        auto& byteRangeValue = byteRange.value();
        return byteRangeValue.byteOffset + byteRangeValue.byteLength <= m_loader.get().data.length;
    }

    return PAL::CMSampleBufferDataIsReady(sample.sampleBuffer());
}

}

#endif