/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "ImageDecoderAVFObjC.h"

#if HAVE(AVSAMPLEBUFFERGENERATOR)

#import "AVFoundationMIMETypeCache.h"
#import "AffineTransform.h"
#import "ContentType.h"
#import "FloatQuad.h"
#import "FloatRect.h"
#import "FloatSize.h"
#import "Logging.h"
#import "MIMETypeRegistry.h"
#import "SharedBuffer.h"
#import "UTIUtilities.h"
#import "WebCoreDecompressionSession.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetResourceLoader.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVSampleBufferGenerator.h>
#import <AVFoundation/AVSampleCursor.h>
#import <AVFoundation/AVTime.h>
#import <VideoToolbox/VTUtilities.h>
#import <map>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <wtf/MainThread.h>
#import <wtf/MediaTime.h>
#import <wtf/NeverDestroyed.h>
#import <wtf/OSObjectPtr.h>
#import <wtf/SoftLinking.h>
#import <wtf/Vector.h>

#import <pal/cf/CoreMediaSoftLink.h>
#import "VideoToolboxSoftLink.h"

#pragma mark - Soft Linking

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferGenerator)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRequest)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
#define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()

#pragma mark -

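// WebCoreSharedBufferResourceLoaderDelegate vends the decoder's in-memory data to
// the AVAssetResourceLoader. Requests that cannot be satisfied yet are queued and
// retried each time more data arrives via -updateData:complete:.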
@interface WebCoreSharedBufferResourceLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WebCore::ImageDecoderAVFObjC* _parent;
    long long _expectedContentSize;
    RetainPtr<NSData> _data;
    bool _complete;
    Vector<RetainPtr<AVAssetResourceLoadingRequest>> _requests;
    Lock _dataLock;
}
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent;
- (void)setExpectedContentSize:(long long)expectedContentSize;
- (void)updateData:(NSData *)data complete:(BOOL)complete;
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)fulfillPendingRequests;
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
@end

@implementation WebCoreSharedBufferResourceLoaderDelegate
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;

    return self;
}

- (void)setExpectedContentSize:(long long)expectedContentSize
{
    LockHolder holder { _dataLock };
    _expectedContentSize = expectedContentSize;

    [self fulfillPendingRequests];
}

- (void)updateData:(NSData *)data complete:(BOOL)complete
{
    LockHolder holder { _dataLock };
    _data = data;
    _complete = complete;

    [self fulfillPendingRequests];
}

- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (!request)
        return NO;

    if (request.finished || request.cancelled)
        return NO;

    // AVURLAsset's resource loader requires knowing the content length up front
    // in order to load successfully; that means either having the complete data
    // for the resource, or having been told the expected content size.
    if (!_complete && !_expectedContentSize)
        return NO;

    if (auto dataRequest = request.dataRequest) {
        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
            return NO;
    }

    return YES;
}

- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    ASSERT(!_requests.contains(loadingRequest));
    _requests.append(loadingRequest);
}

- (void)fulfillPendingRequests
{
    for (auto& request : _requests) {
        if ([self canFulfillRequest:request.get()])
            [self fulfillRequest:request.get()];
    }

    _requests.removeAllMatching([] (auto& request) {
        return request.get().finished;
    });
}

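// NOTE: Fulfillment is incremental: a data request is answered with whatever bytes
// are currently buffered, and -finishLoading is only sent once the entire requested
// range (or, for open-ended requests, the complete resource) has been delivered.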
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (auto infoRequest = request.contentInformationRequest) {
        infoRequest.contentType = _parent->uti();
        infoRequest.byteRangeAccessSupported = YES;
        infoRequest.contentLength = _complete ? _data.get().length : _expectedContentSize;
    }

    if (auto dataRequest = request.dataRequest) {
        long long availableLength = _data.get().length - dataRequest.requestedOffset;
        if (availableLength <= 0)
            return;

        long long requestedLength;
        if (dataRequest.requestsAllDataToEndOfResource)
            requestedLength = availableLength;
        else
            requestedLength = std::min<long long>(availableLength, dataRequest.requestedLength);

        auto range = NSMakeRange(static_cast<NSUInteger>(dataRequest.requestedOffset), static_cast<NSUInteger>(requestedLength));
        NSData *requestedData = [_data subdataWithRange:range];
        if (!requestedData)
            return;

        [dataRequest respondWithData:requestedData];

        if (dataRequest.requestsAllDataToEndOfResource) {
            if (!_complete)
                return;
        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
            return;
    }

    [request finishLoading];
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);

    if ([self canFulfillRequest:loadingRequest]) {
        [self fulfillRequest:loadingRequest];
        if (loadingRequest.finished)
            return NO;
    }

    [self enqueueRequest:loadingRequest];
    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);
    _requests.removeAll(loadingRequest);
}
@end

namespace WebCore {

#pragma mark - Static Methods

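// The asset is created with a fixed custom-scheme URL that AVFoundation cannot load
// itself, ensuring every byte-range request is routed through the
// WebCoreSharedBufferResourceLoaderDelegate above, which serves the buffered data.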
static NSURL *customSchemeURL()
{
    static NeverDestroyed<RetainPtr<NSURL>> url;
    if (!url.get())
        url.get() = adoptNS([[NSURL alloc] initWithString:@"custom-imagedecoderavfobjc://resource"]);

    return url.get().get();
}

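// NOTE: AVAssetReferenceRestrictionForbidAll should prevent the asset from following
// references to any external media, and AVURLAssetUsesNoPersistentCacheKey avoids
// writing the (already in-memory) resource out to the on-disk media cache.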
static NSDictionary *imageDecoderAssetOptions()
{
    static NeverDestroyed<RetainPtr<NSDictionary>> options;
    if (!options.get()) {
        options.get() = @{
            AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
            AVURLAssetUsesNoPersistentCacheKey: @YES,
        };
    }

    return options.get().get();
}

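// AVFoundation expresses track orientation as an affine transform; decompose it into
// axis flips (scale components of -1) plus a rotation angle, which for video tracks
// should only ever be a multiple of 90°.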
static ImageDecoderAVFObjC::RotationProperties transformToRotationProperties(AffineTransform inTransform)
{
    ImageDecoderAVFObjC::RotationProperties rotation;
    if (inTransform.isIdentity())
        return rotation;

    AffineTransform::DecomposedType decomposed { };
    if (!inTransform.decompose(decomposed))
        return rotation;

    rotation.flipY = WTF::areEssentiallyEqual(decomposed.scaleX, -1.);
    rotation.flipX = WTF::areEssentiallyEqual(decomposed.scaleY, -1.);
    auto degrees = rad2deg(decomposed.angle);
    while (degrees < 0)
        degrees += 360;

    // Only support rotation in multiples of 90°:
    if (WTF::areEssentiallyEqual(fmod(degrees, 90.), 0.))
        rotation.angle = clampToUnsigned(degrees);

    return rotation;
}

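// Per-frame bookkeeping: timing information read from the sample cursor, the
// compressed sample buffer (released once decoded), and the decoded image.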
struct ImageDecoderAVFObjC::SampleData {
    Seconds duration { 0 };
    bool hasAlpha { false };
    IntSize frameSize;
    RetainPtr<CMSampleBufferRef> sample;
    RetainPtr<CGImageRef> image;
    MediaTime decodeTime;
    MediaTime presentationTime;
};

#pragma mark - ImageDecoderAVFObjC

RefPtr<ImageDecoderAVFObjC> ImageDecoderAVFObjC::create(SharedBuffer& data, const String& mimeType, AlphaOption alphaOption, GammaAndColorProfileOption gammaAndColorProfileOption)
{
    // AVFoundation may not be available at runtime.
    if (!getAVURLAssetClass())
        return nullptr;

    if (!canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
        return nullptr;

    return adoptRef(*new ImageDecoderAVFObjC(data, mimeType, alphaOption, gammaAndColorProfileOption));
}

ImageDecoderAVFObjC::ImageDecoderAVFObjC(SharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
    : ImageDecoder()
    , m_mimeType(mimeType)
    , m_uti(WebCore::UTIFromMIMEType(mimeType))
    , m_asset(adoptNS([allocAVURLAssetInstance() initWithURL:customSchemeURL() options:imageDecoderAssetOptions()]))
    , m_loader(adoptNS([[WebCoreSharedBufferResourceLoaderDelegate alloc] initWithParent:this]))
    , m_decompressionSession(WebCoreDecompressionSession::createRGB())
{
    [m_loader updateData:data.createNSData().get() complete:NO];

    [m_asset.get().resourceLoader setDelegate:m_loader.get() queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    [m_asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->setTrack(protectedThis->firstEnabledTrack());
        });
    }];
}

ImageDecoderAVFObjC::~ImageDecoderAVFObjC() = default;

bool ImageDecoderAVFObjC::supportsMediaType(MediaType type)
{
    if (type == MediaType::Video)
        return getAVURLAssetClass() && canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer();
    return false;
}

bool ImageDecoderAVFObjC::supportsContentType(const ContentType& type)
{
    if (getAVURLAssetClass() && canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
        return AVFoundationMIMETypeCache::singleton().types().contains(type.containerType());
    return false;
}

bool ImageDecoderAVFObjC::canDecodeType(const String& mimeType)
{
    if (!supportsMediaType(MediaType::Video))
        return false;

    return [getAVURLAssetClass() isPlayableExtendedMIMEType:mimeType];
}

AVAssetTrack *ImageDecoderAVFObjC::firstEnabledTrack()
{
    NSArray<AVAssetTrack *> *videoTracks = [m_asset tracksWithMediaCharacteristic:AVMediaCharacteristicVisual];
    NSUInteger firstEnabledIndex = [videoTracks indexOfObjectPassingTest:^(AVAssetTrack *track, NSUInteger, BOOL*) {
        return track.enabled;
    }];

    if (firstEnabledIndex == NSNotFound) {
        LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
        return nil;
    }

    return [videoTracks objectAtIndex:firstEnabledIndex];
}

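// Builds the frame table by walking a sample cursor across the entire track,
// recording each sample's duration, decode and presentation timestamps, and
// compressed sample buffer, and mapping presentation time back to frame index.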
void ImageDecoderAVFObjC::readSampleMetadata()
{
    if (!m_sampleData.isEmpty())
        return;

    // NOTE: there is no API to return the number of samples in the sample table. Instead,
    // simply increment the sample in decode order by an arbitrarily large number.
    RetainPtr<AVSampleCursor> cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];
    int64_t sampleCount = 0;
    if (cursor)
        sampleCount = 1 + [cursor stepInDecodeOrderByCount:std::numeric_limits<int32_t>::max()];

    // NOTE: there is no API to return the first sample cursor in presentation order. Instead,
    // simply decrement the sample in presentation order by an arbitrarily large number.
    [cursor stepInPresentationOrderByCount:std::numeric_limits<int32_t>::min()];

    ASSERT(sampleCount >= 0);
    m_sampleData.resize(static_cast<size_t>(sampleCount));

    if (!m_generator)
        m_generator = adoptNS([allocAVSampleBufferGeneratorInstance() initWithAsset:m_asset.get() timebase:nil]);

    for (size_t index = 0; index < static_cast<size_t>(sampleCount); ++index) {
        auto& sampleData = m_sampleData[index];
        sampleData.duration = Seconds(PAL::CMTimeGetSeconds([cursor currentSampleDuration]));
        sampleData.decodeTime = PAL::toMediaTime([cursor decodeTimeStamp]);
        sampleData.presentationTime = PAL::toMediaTime([cursor presentationTimeStamp]);
        auto request = adoptNS([allocAVSampleBufferRequestInstance() initWithStartCursor:cursor.get()]);
        sampleData.sample = adoptCF([m_generator createSampleBufferForRequest:request.get()]);
        m_presentationTimeToIndex.insert(std::make_pair(sampleData.presentationTime, index));
        [cursor stepInPresentationOrderByCount:1];
    }
}

void ImageDecoderAVFObjC::readTrackMetadata()
{
    if (!m_rotation)
        m_rotation = transformToRotationProperties(CGAffineTransformConcat(m_asset.get().preferredTransform, m_track.get().preferredTransform));

    if (!m_size) {
        auto size = FloatSize(m_track.get().naturalSize);
        auto angle = m_rotation.value().angle;
        if (angle == 90 || angle == 270)
            size = size.transposedSize();

        m_size = expandedIntSize(size);
    }
}

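// Decodes one compressed sample to a pixel buffer, applies any rotation or flip via
// a VTImageRotationSession, converts the result to a CGImage, and caches it in the
// frame's SampleData slot, dropping the compressed sample to save memory.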
bool ImageDecoderAVFObjC::storeSampleBuffer(CMSampleBufferRef sampleBuffer)
{
    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    if (!pixelBuffer) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
        return false;
    }

    auto presentationTime = PAL::toMediaTime(PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    auto indexIter = m_presentationTimeToIndex.find(presentationTime);

    if (m_rotation && !m_rotation.value().isIdentity()) {
        auto& rotation = m_rotation.value();
        if (!m_rotationSession) {
            VTImageRotationSessionRef rawRotationSession = nullptr;
            VTImageRotationSessionCreate(kCFAllocatorDefault, rotation.angle, &rawRotationSession);
            m_rotationSession = rawRotationSession;
            VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_EnableHighSpeedTransfer, kCFBooleanTrue);

            if (rotation.flipY)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipVerticalOrientation, kCFBooleanTrue);
            if (rotation.flipX)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipHorizontalOrientation, kCFBooleanTrue);
        }

        if (!m_rotationPool) {
            auto pixelAttributes = (CFDictionaryRef)@{
                (NSString *)kCVPixelBufferWidthKey: @(m_size.value().width()),
                (NSString *)kCVPixelBufferHeightKey: @(m_size.value().height()),
                (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
                (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
            };
            CVPixelBufferPoolRef rawPool = nullptr;
            CVPixelBufferPoolCreate(kCFAllocatorDefault, nullptr, pixelAttributes, &rawPool);
            m_rotationPool = adoptCF(rawPool);
        }

        CVPixelBufferRef rawRotatedBuffer = nullptr;
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_rotationPool.get(), &rawRotatedBuffer);
        auto status = VTImageRotationSessionTransferImage(m_rotationSession.get(), pixelBuffer.get(), rawRotatedBuffer);
        if (status == noErr)
            pixelBuffer = adoptCF(rawRotatedBuffer);
    }

    CGImageRef rawImage = nullptr;
    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
        return false;
    }

    ASSERT(indexIter->second < m_sampleData.size());
    auto& sampleData = m_sampleData[indexIter->second];
    sampleData.image = adoptCF(rawImage);
    sampleData.sample = nullptr;

    auto alphaInfo = CGImageGetAlphaInfo(rawImage);
    sampleData.hasAlpha = (alphaInfo != kCGImageAlphaNone && alphaInfo != kCGImageAlphaNoneSkipLast && alphaInfo != kCGImageAlphaNoneSkipFirst);

    return true;
}

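// NOTE: -stepInDecodeOrderByCount: returns the number of samples actually stepped,
// so a zero result means the cursor could not advance past the last sample; rewind
// to the first sample so decoding can wrap around (repetitionCount() is infinite).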
void ImageDecoderAVFObjC::advanceCursor()
{
    if (![m_cursor stepInDecodeOrderByCount:1])
        m_cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];
}

void ImageDecoderAVFObjC::setTrack(AVAssetTrack *track)
{
    if (m_track == track)
        return;
    m_track = track;

    LockHolder holder { m_sampleGeneratorLock };
    m_sampleData.clear();
    m_size.reset();
    m_rotation.reset();
    m_cursor = nullptr;
    m_generator = nullptr;
    m_rotationSession = nullptr;

    [track loadValuesAsynchronouslyForKeys:@[@"naturalSize", @"preferredTransform"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->readTrackMetadata();
            protectedThis->readSampleMetadata();
        });
    }];
}

EncodedDataStatus ImageDecoderAVFObjC::encodedDataStatus() const
{
    if (m_sampleData.isEmpty())
        return EncodedDataStatus::Unknown;
    return EncodedDataStatus::Complete;
}

IntSize ImageDecoderAVFObjC::size() const
{
    if (m_size)
        return m_size.value();
    return IntSize();
}

size_t ImageDecoderAVFObjC::frameCount() const
{
    return m_sampleData.size();
}

RepetitionCount ImageDecoderAVFObjC::repetitionCount() const
{
    // In the absence of instructions to the contrary, assume all media formats repeat infinitely.
    // FIXME: Future media formats may embed repeat count information, and when that is available
    // through AVAsset, account for it here.
    return RepetitionCountInfinite;
}

String ImageDecoderAVFObjC::uti() const
{
    return m_uti;
}

String ImageDecoderAVFObjC::filenameExtension() const
{
    return MIMETypeRegistry::getPreferredExtensionForMIMEType(m_mimeType);
}

IntSize ImageDecoderAVFObjC::frameSizeAtIndex(size_t, SubsamplingLevel) const
{
    return size();
}

bool ImageDecoderAVFObjC::frameIsCompleteAtIndex(size_t index) const
{
    if (index >= m_sampleData.size())
        return false;

    const auto& sampleData = m_sampleData[index];
    if (!sampleData.sample)
        return false;

    return PAL::CMSampleBufferDataIsReady(sampleData.sample.get());
}

ImageOrientation ImageDecoderAVFObjC::frameOrientationAtIndex(size_t) const
{
    return ImageOrientation();
}

Seconds ImageDecoderAVFObjC::frameDurationAtIndex(size_t index) const
{
    if (index < m_sampleData.size())
        return m_sampleData[index].duration;
    return { };
}

bool ImageDecoderAVFObjC::frameHasAlphaAtIndex(size_t index) const
{
    if (index < m_sampleData.size())
        return m_sampleData[index].hasAlpha;
    return false;
}

bool ImageDecoderAVFObjC::frameAllowSubsamplingAtIndex(size_t index) const
{
    return index < m_sampleData.size();
}

unsigned ImageDecoderAVFObjC::frameBytesAtIndex(size_t index, SubsamplingLevel subsamplingLevel) const
{
    if (!frameIsCompleteAtIndex(index))
        return 0;

    IntSize frameSize = frameSizeAtIndex(index, subsamplingLevel);
    return (frameSize.area() * 4).unsafeGet();
}

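// Frames must be decoded in decode order starting from the nearest preceding sync
// sample: rewind the cursor if the requested frame precedes our current position,
// then decode forward until the requested frame's image has been produced.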
NativeImagePtr ImageDecoderAVFObjC::createFrameImageAtIndex(size_t index, SubsamplingLevel, const DecodingOptions&)
{
    LockHolder holder { m_sampleGeneratorLock };

    if (index >= m_sampleData.size())
        return nullptr;

    auto& sampleData = m_sampleData[index];
    if (sampleData.image)
        return sampleData.image;

    if (!m_cursor)
        m_cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];

    auto frameCursor = [m_track makeSampleCursorWithPresentationTimeStamp:PAL::toCMTime(sampleData.presentationTime)];
    if ([frameCursor comparePositionInDecodeOrderWithPositionOfCursor:m_cursor.get()] == NSOrderedAscending) {
        // Rewind cursor to the last sync sample to begin decoding
        m_cursor = adoptNS([frameCursor copy]);
        do {
            if ([m_cursor currentSampleSyncInfo].sampleIsFullSync)
                break;
        } while ([m_cursor stepInDecodeOrderByCount:-1] == -1);
    }

    if (!m_generator)
        m_generator = adoptNS([allocAVSampleBufferGeneratorInstance() initWithAsset:m_asset.get() timebase:nil]);

    while (true) {
        if ([frameCursor comparePositionInDecodeOrderWithPositionOfCursor:m_cursor.get()] == NSOrderedAscending)
            return nullptr;

        auto presentationTime = PAL::toMediaTime(m_cursor.get().presentationTimeStamp);
        auto indexIter = m_presentationTimeToIndex.find(presentationTime);

        if (indexIter == m_presentationTimeToIndex.end())
            break;

        auto& cursorSampleData = m_sampleData[indexIter->second];

        if (!cursorSampleData.sample) {
            auto request = adoptNS([allocAVSampleBufferRequestInstance() initWithStartCursor:m_cursor.get()]);
            cursorSampleData.sample = adoptCF([m_generator createSampleBufferForRequest:request.get()]);
        }

        if (!cursorSampleData.sample)
            break;

        if (!storeSampleBuffer(cursorSampleData.sample.get()))
            break;

        advanceCursor();
        if (sampleData.image)
            return sampleData.image;
    }

    advanceCursor();
    return nullptr;
}

void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
{
    m_loader.get().expectedContentSize = expectedContentSize;
}

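// NOTE: Track selection normally happens in the asynchronous "tracks" load handler
// set up in the constructor; once the complete resource has arrived, do it eagerly
// so encodedDataStatus() and frameCount() reflect the full sample table.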
void ImageDecoderAVFObjC::setData(SharedBuffer& data, bool allDataReceived)
{
    [m_loader updateData:data.createNSData().get() complete:allDataReceived];

    if (allDataReceived) {
        m_isAllDataReceived = true;

        if (!m_track)
            setTrack(firstEnabledTrack());

        readTrackMetadata();
        readSampleMetadata();
    }
}

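// Drops cached decoded images for all frames before `index` to bound memory use;
// their compressed samples can be regenerated and re-decoded on demand.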
void ImageDecoderAVFObjC::clearFrameBufferCache(size_t index)
{
    for (size_t i = 0; i < std::min(index, m_sampleData.size()); ++i)
        m_sampleData[i].image = nullptr;
}

}

#endif