Leak of one AVSampleCursor inside ImageDecoderAVFObjC::createFrameImageAtIndex()
Source/WebCore/platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.mm
/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "ImageDecoderAVFObjC.h"

#if HAVE(AVSAMPLEBUFFERGENERATOR)

#import "AffineTransform.h"
#import "FloatQuad.h"
#import "FloatRect.h"
#import "FloatSize.h"
#import "Logging.h"
#import "MIMETypeRegistry.h"
#import "SharedBuffer.h"
#import "UTIUtilities.h"
#import "WebCoreDecompressionSession.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetResourceLoader.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVSampleBufferGenerator.h>
#import <AVFoundation/AVSampleCursor.h>
#import <AVFoundation/AVTime.h>
#import <VideoToolbox/VTUtilities.h>
#import <map>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <wtf/MainThread.h>
#import <wtf/MediaTime.h>
#import <wtf/NeverDestroyed.h>
#import <wtf/OSObjectPtr.h>
#import <wtf/SoftLinking.h>
#import <wtf/Vector.h>

#import <pal/cf/CoreMediaSoftLink.h>
#import "VideoToolboxSoftLink.h"

#pragma mark - Soft Linking

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferGenerator)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRequest)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
#define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()

#pragma mark -

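// WebCoreSharedBufferResourceLoaderDelegate vends the in-memory contents of a SharedBuffer
// to the AVAssetResourceLoader in response to requests against the custom URL scheme,
// queueing any loading request that cannot yet be satisfied until more data arrives.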
@interface WebCoreSharedBufferResourceLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WebCore::ImageDecoderAVFObjC* _parent;
    long long _expectedContentSize;
    RetainPtr<NSData> _data;
    bool _complete;
    Vector<RetainPtr<AVAssetResourceLoadingRequest>> _requests;
    Lock _dataLock;
}
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent;
- (void)setExpectedContentSize:(long long)expectedContentSize;
- (void)updateData:(NSData *)data complete:(BOOL)complete;
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)fulfillPendingRequests;
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
@end

@implementation WebCoreSharedBufferResourceLoaderDelegate
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;

    return self;
}

- (void)setExpectedContentSize:(long long)expectedContentSize
{
    LockHolder holder { _dataLock };
    _expectedContentSize = expectedContentSize;

    [self fulfillPendingRequests];
}

- (void)updateData:(NSData *)data complete:(BOOL)complete
{
    LockHolder holder { _dataLock };
    _data = data;
    _complete = complete;

    [self fulfillPendingRequests];
}

- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (!request)
        return NO;

    if (request.finished || request.cancelled)
        return NO;

    // AVURLAsset's resource loader requires knowing the expected content size
    // to load successfully, which means either having the complete data for the
    // resource or an explicitly provided expected content size.
    if (!_complete && !_expectedContentSize)
        return NO;

    if (auto dataRequest = request.dataRequest) {
        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
            return NO;
    }

    return YES;
}

- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    ASSERT(!_requests.contains(loadingRequest));
    _requests.append(loadingRequest);
}

- (void)fulfillPendingRequests
{
    for (auto& request : _requests) {
        if ([self canFulfillRequest:request.get()])
            [self fulfillRequest:request.get()];
    }

    _requests.removeAllMatching([] (auto& request) {
        return request.get().finished;
    });
}

- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (auto infoRequest = request.contentInformationRequest) {
        infoRequest.contentType = _parent->uti();
        infoRequest.byteRangeAccessSupported = YES;
        infoRequest.contentLength = _complete ? _data.get().length : _expectedContentSize;
    }

    if (auto dataRequest = request.dataRequest) {
        long long availableLength = _data.get().length - dataRequest.requestedOffset;
        if (availableLength <= 0)
            return;

        long long requestedLength;
        if (dataRequest.requestsAllDataToEndOfResource)
            requestedLength = availableLength;
        else
            requestedLength = std::min<long long>(availableLength, dataRequest.requestedLength);

        auto range = NSMakeRange(static_cast<NSUInteger>(dataRequest.requestedOffset), static_cast<NSUInteger>(requestedLength));
        NSData* requestedData = [_data subdataWithRange:range];
        if (!requestedData)
            return;

        [dataRequest respondWithData:requestedData];

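        // Bail out before -finishLoading unless the request has been fully satisfied:
        // an all-data-to-end request requires the complete resource, and a ranged
        // request requires that every requested byte has been delivered.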
        if (dataRequest.requestsAllDataToEndOfResource) {
            if (!_complete)
                return;
        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
            return;
    }

    [request finishLoading];
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);

    if ([self canFulfillRequest:loadingRequest]) {
        [self fulfillRequest:loadingRequest];
        if (loadingRequest.finished)
            return NO;
    }

    [self enqueueRequest:loadingRequest];
    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);
    _requests.removeAll(loadingRequest);
}
@end

namespace WebCore {

#pragma mark - Static Methods

static NSURL *customSchemeURL()
{
    static NeverDestroyed<RetainPtr<NSURL>> url;
    if (!url.get())
        url.get() = adoptNS([[NSURL alloc] initWithString:@"custom-imagedecoderavfobjc://resource"]);

    return url.get().get();
}

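// Restrict the asset from following references to other resources and from writing media
// data to a persistent cache, so that all media data comes through the resource loader delegate.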
static NSDictionary *imageDecoderAssetOptions()
{
    static NeverDestroyed<RetainPtr<NSDictionary>> options;
    if (!options.get()) {
        options.get() = @{
            AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
            AVURLAssetUsesNoPersistentCacheKey: @YES,
        };
    }

    return options.get().get();
}

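// Decompose an affine transform into X/Y flips plus a rotation angle, which is honored
// only when it is a multiple of 90°.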
static ImageDecoderAVFObjC::RotationProperties transformToRotationProperties(AffineTransform inTransform)
{
    ImageDecoderAVFObjC::RotationProperties rotation;
    if (inTransform.isIdentity())
        return rotation;

    AffineTransform::DecomposedType decomposed { };
    if (!inTransform.decompose(decomposed))
        return rotation;

    rotation.flipY = WTF::areEssentiallyEqual(decomposed.scaleX, -1.);
    rotation.flipX = WTF::areEssentiallyEqual(decomposed.scaleY, -1.);
    auto degrees = rad2deg(decomposed.angle);
    while (degrees < 0)
        degrees += 360;

    // Only support rotation in multiples of 90°:
    if (WTF::areEssentiallyEqual(fmod(degrees, 90.), 0.))
        rotation.angle = clampToUnsigned(degrees);

    return rotation;
}

struct ImageDecoderAVFObjC::SampleData {
    Seconds duration { 0 };
    bool hasAlpha { false };
    IntSize frameSize;
    RetainPtr<CMSampleBufferRef> sample;
    RetainPtr<CGImageRef> image;
    MediaTime decodeTime;
    MediaTime presentationTime;
};

#pragma mark - ImageDecoderAVFObjC

RefPtr<ImageDecoderAVFObjC> ImageDecoderAVFObjC::create(SharedBuffer& data, const String& mimeType, AlphaOption alphaOption, GammaAndColorProfileOption gammaAndColorProfileOption)
{
    // AVFoundation may not be available at runtime.
    if (!getAVURLAssetClass())
        return nullptr;

    if (!canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
        return nullptr;

    return adoptRef(*new ImageDecoderAVFObjC(data, mimeType, alphaOption, gammaAndColorProfileOption));
}

ImageDecoderAVFObjC::ImageDecoderAVFObjC(SharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
    : ImageDecoder()
    , m_mimeType(mimeType)
    , m_uti(WebCore::UTIFromMIMEType(mimeType))
    , m_asset(adoptNS([allocAVURLAssetInstance() initWithURL:customSchemeURL() options:imageDecoderAssetOptions()]))
    , m_loader(adoptNS([[WebCoreSharedBufferResourceLoaderDelegate alloc] initWithParent:this]))
    , m_decompressionSession(WebCoreDecompressionSession::createRGB())
{
    [m_loader updateData:data.createNSData().get() complete:NO];

    [m_asset.get().resourceLoader setDelegate:m_loader.get() queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    [m_asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->setTrack(protectedThis->firstEnabledTrack());
        });
    }];
}

ImageDecoderAVFObjC::~ImageDecoderAVFObjC() = default;

bool ImageDecoderAVFObjC::canDecodeType(const String& mimeType)
{
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:mimeType];
}

AVAssetTrack *ImageDecoderAVFObjC::firstEnabledTrack()
{
    NSArray<AVAssetTrack *> *videoTracks = [m_asset tracksWithMediaCharacteristic:AVMediaCharacteristicVisual];
    NSUInteger firstEnabledIndex = [videoTracks indexOfObjectPassingTest:^(AVAssetTrack *track, NSUInteger, BOOL*) {
        return track.enabled;
    }];

    if (firstEnabledIndex == NSNotFound) {
        LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
        return nil;
    }

    return [videoTracks objectAtIndex:firstEnabledIndex];
}

void ImageDecoderAVFObjC::readSampleMetadata()
{
    if (!m_sampleData.isEmpty())
        return;

    // NOTE: there is no API to return the number of samples in the sample table. Instead,
    // simply step the cursor forward in decode order by an arbitrarily large count.
    RetainPtr<AVSampleCursor> cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];
    int64_t sampleCount = 0;
    if (cursor)
        sampleCount = 1 + [cursor stepInDecodeOrderByCount:std::numeric_limits<int32_t>::max()];

    // NOTE: there is no API to return the first sample cursor in presentation order. Instead,
    // simply step the cursor backward in presentation order by an arbitrarily large count.
    [cursor stepInPresentationOrderByCount:std::numeric_limits<int32_t>::min()];

    ASSERT(sampleCount >= 0);
    m_sampleData.resize(static_cast<size_t>(sampleCount));

    if (!m_generator)
        m_generator = adoptNS([allocAVSampleBufferGeneratorInstance() initWithAsset:m_asset.get() timebase:nil]);

    for (size_t index = 0; index < static_cast<size_t>(sampleCount); ++index) {
        auto& sampleData = m_sampleData[index];
        sampleData.duration = Seconds(PAL::CMTimeGetSeconds([cursor currentSampleDuration]));
        sampleData.decodeTime = PAL::toMediaTime([cursor decodeTimeStamp]);
        sampleData.presentationTime = PAL::toMediaTime([cursor presentationTimeStamp]);
        auto request = adoptNS([allocAVSampleBufferRequestInstance() initWithStartCursor:cursor.get()]);
        sampleData.sample = adoptCF([m_generator createSampleBufferForRequest:request.get()]);
        m_presentationTimeToIndex.insert(std::make_pair(sampleData.presentationTime, index));
        [cursor stepInPresentationOrderByCount:1];
    }
}

void ImageDecoderAVFObjC::readTrackMetadata()
{
    if (!m_rotation)
        m_rotation = transformToRotationProperties(CGAffineTransformConcat(m_asset.get().preferredTransform, m_track.get().preferredTransform));

    if (!m_size) {
        auto size = FloatSize(m_track.get().naturalSize);
        auto angle = m_rotation.value().angle;
        if (angle == 90 || angle == 270)
            size = size.transposedSize();

        m_size = expandedIntSize(size);
    }
}

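// Synchronously decode the sample to a pixel buffer, apply any rotation via a
// VTImageRotationSession, convert the result to a CGImage, and cache it in
// m_sampleData in place of the encoded sample.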
bool ImageDecoderAVFObjC::storeSampleBuffer(CMSampleBufferRef sampleBuffer)
{
    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    if (!pixelBuffer) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
        return false;
    }

    auto presentationTime = PAL::toMediaTime(PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    auto indexIter = m_presentationTimeToIndex.find(presentationTime);

    if (m_rotation && !m_rotation.value().isIdentity()) {
        auto& rotation = m_rotation.value();
        if (!m_rotationSession) {
            VTImageRotationSessionRef rawRotationSession = nullptr;
            VTImageRotationSessionCreate(kCFAllocatorDefault, rotation.angle, &rawRotationSession);
            m_rotationSession = adoptCF(rawRotationSession);
            VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_EnableHighSpeedTransfer, kCFBooleanTrue);

            if (rotation.flipY)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipVerticalOrientation, kCFBooleanTrue);
            if (rotation.flipX)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipHorizontalOrientation, kCFBooleanTrue);
        }

        if (!m_rotationPool) {
            auto pixelAttributes = (CFDictionaryRef)@{
                (NSString *)kCVPixelBufferWidthKey: @(m_size.value().width()),
                (NSString *)kCVPixelBufferHeightKey: @(m_size.value().height()),
                (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
                (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
            };
            CVPixelBufferPoolRef rawPool = nullptr;
            CVPixelBufferPoolCreate(kCFAllocatorDefault, nullptr, pixelAttributes, &rawPool);
            m_rotationPool = adoptCF(rawPool);
        }

        CVPixelBufferRef rawRotatedBuffer = nullptr;
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_rotationPool.get(), &rawRotatedBuffer);
        auto status = VTImageRotationSessionTransferImage(m_rotationSession.get(), pixelBuffer.get(), rawRotatedBuffer);
        if (status == noErr)
            pixelBuffer = adoptCF(rawRotatedBuffer);
    }

    CGImageRef rawImage = nullptr;
    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
        return false;
    }

    ASSERT(indexIter->second < m_sampleData.size());
    auto& sampleData = m_sampleData[indexIter->second];
    sampleData.image = adoptCF(rawImage);
    sampleData.sample = nullptr;

    auto alphaInfo = CGImageGetAlphaInfo(rawImage);
    sampleData.hasAlpha = (alphaInfo != kCGImageAlphaNone && alphaInfo != kCGImageAlphaNoneSkipLast && alphaInfo != kCGImageAlphaNoneSkipFirst);

    return true;
}

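// Step to the next sample in decode order, wrapping back to the first sample once the
// cursor cannot step any further.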
void ImageDecoderAVFObjC::advanceCursor()
{
    if (![m_cursor stepInDecodeOrderByCount:1])
        m_cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];
}

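// Adopt a new track, dropping all cached sample, size, and rotation state, and
// asynchronously re-read the track and sample metadata on the main thread.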
void ImageDecoderAVFObjC::setTrack(AVAssetTrack *track)
{
    if (m_track == track)
        return;
    m_track = track;

    LockHolder holder { m_sampleGeneratorLock };
    m_sampleData.clear();
    m_size.reset();
    m_rotation.reset();
    m_cursor = nullptr;
    m_generator = nullptr;
    m_rotationSession = nullptr;

    [track loadValuesAsynchronouslyForKeys:@[@"naturalSize", @"preferredTransform"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->readTrackMetadata();
            protectedThis->readSampleMetadata();
        });
    }];
}

EncodedDataStatus ImageDecoderAVFObjC::encodedDataStatus() const
{
    if (m_sampleData.isEmpty())
        return EncodedDataStatus::Unknown;
    return EncodedDataStatus::Complete;
}

IntSize ImageDecoderAVFObjC::size() const
{
    if (m_size)
        return m_size.value();
    return IntSize();
}

size_t ImageDecoderAVFObjC::frameCount() const
{
    return m_sampleData.size();
}

RepetitionCount ImageDecoderAVFObjC::repetitionCount() const
{
    // In the absence of instructions to the contrary, assume all media formats repeat infinitely.
    // FIXME: Future media formats may embed repeat count information, and when that is available
    // through AVAsset, account for it here.
    return RepetitionCountInfinite;
}

String ImageDecoderAVFObjC::uti() const
{
    return m_uti;
}

String ImageDecoderAVFObjC::filenameExtension() const
{
    return MIMETypeRegistry::getPreferredExtensionForMIMEType(m_mimeType);
}

IntSize ImageDecoderAVFObjC::frameSizeAtIndex(size_t, SubsamplingLevel) const
{
    return size();
}

bool ImageDecoderAVFObjC::frameIsCompleteAtIndex(size_t index) const
{
    if (index >= m_sampleData.size())
        return false;

    auto& sampleData = m_sampleData[index];
    if (!sampleData.sample)
        return false;

    return PAL::CMSampleBufferDataIsReady(sampleData.sample.get());
}

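// Rotation is baked into the decoded pixels by storeSampleBuffer(), so every frame
// reports the default orientation.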
ImageOrientation ImageDecoderAVFObjC::frameOrientationAtIndex(size_t) const
{
    return ImageOrientation();
}

Seconds ImageDecoderAVFObjC::frameDurationAtIndex(size_t index) const
{
    if (index < m_sampleData.size())
        return m_sampleData[index].duration;
    return { };
}

bool ImageDecoderAVFObjC::frameHasAlphaAtIndex(size_t index) const
{
    if (index < m_sampleData.size())
        return m_sampleData[index].hasAlpha;
    return false;
}

bool ImageDecoderAVFObjC::frameAllowSubsamplingAtIndex(size_t index) const
{
    return index < m_sampleData.size();
}

unsigned ImageDecoderAVFObjC::frameBytesAtIndex(size_t index, SubsamplingLevel subsamplingLevel) const
{
    if (!frameIsCompleteAtIndex(index))
        return 0;

    IntSize frameSize = frameSizeAtIndex(index, subsamplingLevel);
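    // Decoded frames are 32 bits (4 bytes) per pixel.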
    return (frameSize.area() * 4).unsafeGet();
}

NativeImagePtr ImageDecoderAVFObjC::createFrameImageAtIndex(size_t index, SubsamplingLevel, const DecodingOptions&)
{
    LockHolder holder { m_sampleGeneratorLock };

    if (index >= m_sampleData.size())
        return nullptr;

    auto& sampleData = m_sampleData[index];
    if (sampleData.image)
        return sampleData.image;

    if (!m_cursor)
        m_cursor = [m_track makeSampleCursorAtFirstSampleInDecodeOrder];

    auto frameCursor = [m_track makeSampleCursorWithPresentationTimeStamp:PAL::toCMTime(sampleData.presentationTime)];
    if ([frameCursor comparePositionInDecodeOrderWithPositionOfCursor:m_cursor.get()] == NSOrderedAscending) {
        // Rewind cursor to the last sync sample to begin decoding.
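        // NOTE: -copy returns a +1 object, so it must be adopted to avoid leaking the AVSampleCursor.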
        m_cursor = adoptNS([frameCursor copy]);
        do {
            if ([m_cursor currentSampleSyncInfo].sampleIsFullSync)
                break;
        } while ([m_cursor stepInDecodeOrderByCount:-1] == -1);
    }

    if (!m_generator)
        m_generator = adoptNS([allocAVSampleBufferGeneratorInstance() initWithAsset:m_asset.get() timebase:nil]);

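    // Decode forward from the rewound cursor; every sample from the preceding sync sample
    // up to the requested frame must be decoded before the requested frame's image is available.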
    while (true) {
        if ([frameCursor comparePositionInDecodeOrderWithPositionOfCursor:m_cursor.get()] == NSOrderedAscending)
            return nullptr;

        auto presentationTime = PAL::toMediaTime(m_cursor.get().presentationTimeStamp);
        auto indexIter = m_presentationTimeToIndex.find(presentationTime);
        advanceCursor();

        if (indexIter == m_presentationTimeToIndex.end())
            return nullptr;

        auto& cursorSampleData = m_sampleData[indexIter->second];

        if (!cursorSampleData.sample) {
            auto request = adoptNS([allocAVSampleBufferRequestInstance() initWithStartCursor:frameCursor]);
            cursorSampleData.sample = adoptCF([m_generator createSampleBufferForRequest:request.get()]);
        }

        if (!cursorSampleData.sample)
            return nullptr;

        if (!storeSampleBuffer(cursorSampleData.sample.get()))
            return nullptr;

        if (sampleData.image)
            return sampleData.image;
    }

    ASSERT_NOT_REACHED();
    return nullptr;
}

void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
{
    m_loader.get().expectedContentSize = expectedContentSize;
}

void ImageDecoderAVFObjC::setData(SharedBuffer& data, bool allDataReceived)
{
    [m_loader updateData:data.createNSData().get() complete:allDataReceived];

    if (allDataReceived) {
        m_isAllDataReceived = true;

        if (!m_track)
            setTrack(firstEnabledTrack());

        readTrackMetadata();
        readSampleMetadata();
    }
}

void ImageDecoderAVFObjC::clearFrameBufferCache(size_t index)
{
    for (size_t i = 0; i < index; ++i)
        m_sampleData[i].image = nullptr;
}

}

#endif