[WTF] Add makeUnique<T>, which ensures T is fast-allocated, makeUnique / makeUniqueWi...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / ImageDecoderAVFObjC.mm
1 /*
2  * Copyright (C) 2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23  * THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "ImageDecoderAVFObjC.h"
28
29 #if HAVE(AVASSETREADER)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AffineTransform.h"
33 #import "ContentType.h"
34 #import "FloatQuad.h"
35 #import "FloatRect.h"
36 #import "FloatSize.h"
37 #import "ImageRotationSessionVT.h"
38 #import "Logging.h"
39 #import "MIMETypeRegistry.h"
40 #import "MediaSampleAVFObjC.h"
41 #import "SharedBuffer.h"
42 #import "UTIUtilities.h"
43 #import "WebCoreDecompressionSession.h"
44 #import <AVFoundation/AVAsset.h>
45 #import <AVFoundation/AVAssetReader.h>
46 #import <AVFoundation/AVAssetReaderOutput.h>
47 #import <AVFoundation/AVAssetResourceLoader.h>
48 #import <AVFoundation/AVAssetTrack.h>
49 #import <AVFoundation/AVTime.h>
50 #import <VideoToolbox/VTUtilities.h>
51 #import <pal/avfoundation/MediaTimeAVFoundation.h>
52 #import <wtf/MainThread.h>
53 #import <wtf/MediaTime.h>
54 #import <wtf/NeverDestroyed.h>
55 #import <wtf/Optional.h>
56 #import <wtf/Vector.h>
57
58 #import "CoreVideoSoftLink.h"
59 #import "VideoToolboxSoftLink.h"
60 #import <pal/cf/CoreMediaSoftLink.h>
61 #import <pal/cocoa/AVFoundationSoftLink.h>
62
63 #pragma mark -
64
// Delegate for AVAssetResourceLoader that serves an in-memory, incrementally
// growing resource (the encoded image data owned by ImageDecoderAVFObjC).
// Requests that cannot be satisfied yet are queued and fulfilled as more data
// arrives via -updateData:complete:.
@interface WebCoreSharedBufferResourceLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WebCore::ImageDecoderAVFObjC* _parent; // Not owned; assumed to outlive this delegate — TODO(review): confirm.
    long long _expectedContentSize; // Total resource size, when known before all bytes have arrived.
    RetainPtr<NSData> _data; // Latest snapshot of the encoded data.
    bool _complete; // True once every byte of the resource has been supplied.
    Vector<RetainPtr<AVAssetResourceLoadingRequest>> _requests; // Requests waiting for more data.
    Lock _dataLock; // Guards the state above; loader callbacks arrive on a dispatch queue.
}
@property (readonly) NSData* data;
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent;
- (void)setExpectedContentSize:(long long)expectedContentSize;
- (void)updateData:(NSData *)data complete:(BOOL)complete;
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)fulfillPendingRequests;
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
82
@implementation WebCoreSharedBufferResourceLoaderDelegate
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;

    return self;
}

- (NSData*)data
{
    return _data.get();
}

// Records the expected total size and retries any queued requests, since
// knowing the size may make them fulfillable (see -canFulfillRequest:).
- (void)setExpectedContentSize:(long long)expectedContentSize
{
    LockHolder holder { _dataLock };
    _expectedContentSize = expectedContentSize;

    [self fulfillPendingRequests];
}

// Replaces the data snapshot (and completeness flag), then retries any queued
// requests that the new data may satisfy.
- (void)updateData:(NSData *)data complete:(BOOL)complete
{
    LockHolder holder { _dataLock };
    _data = data;
    _complete = complete;

    [self fulfillPendingRequests];
}

// Returns whether -fulfillRequest: can make progress on |request| right now.
// Must be called with _dataLock held.
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (!request)
        return NO;

    if (request.finished || request.cancelled)
        return NO;

    // AVURLAsset's resource loader requires knowing the expected content size
    // to load successfully. That requires either having the complete data for
    // the resource, or knowing the expected content size.
    if (!_complete && !_expectedContentSize)
        return NO;

    // A data request whose offset lies beyond the bytes we currently hold
    // cannot be answered yet.
    if (auto dataRequest = request.dataRequest) {
        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
            return NO;
    }

    return YES;
}

// Parks |loadingRequest| until more data arrives. Must be called with
// _dataLock held.
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    ASSERT(!_requests.contains(loadingRequest));
    _requests.append(loadingRequest);
}

// Attempts every queued request, then drops the ones that finished.
// Must be called with _dataLock held.
- (void)fulfillPendingRequests
{
    for (auto& request : _requests) {
        if ([self canFulfillRequest:request.get()])
            [self fulfillRequest:request.get()];
    }

    _requests.removeAllMatching([] (auto& request) {
        return request.get().finished;
    });
}

// Answers the content-information and/or data portions of |request| from the
// current snapshot; calls -finishLoading only once the request is fully
// satisfied. Must be called with _dataLock held.
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (auto infoRequest = request.contentInformationRequest) {
        infoRequest.contentType = _parent->uti();
        infoRequest.byteRangeAccessSupported = YES;
        infoRequest.contentLength = _complete ? _data.get().length : _expectedContentSize;
    }

    if (auto dataRequest = request.dataRequest) {
        long long availableLength = _data.get().length - dataRequest.requestedOffset;
        if (availableLength <= 0)
            return;

        long long requestedLength;
        if (dataRequest.requestsAllDataToEndOfResource)
            requestedLength = availableLength;
        else
            requestedLength = std::min<long long>(availableLength, dataRequest.requestedLength);

        auto range = NSMakeRange(static_cast<NSUInteger>(dataRequest.requestedOffset), static_cast<NSUInteger>(requestedLength));
        NSData* requestedData = [_data subdataWithRange:range];
        if (!requestedData)
            return;

        [dataRequest respondWithData:requestedData];

        // Leave the request queued (do not finishLoading) while it still
        // expects more bytes than we have delivered.
        if (dataRequest.requestsAllDataToEndOfResource) {
            if (!_complete)
                return;
        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
            return;
    }

    [request finishLoading];
}

// AVAssetResourceLoaderDelegate: try to satisfy the request immediately;
// otherwise queue it. Always returns YES so AVFoundation waits for us.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);

    if ([self canFulfillRequest:loadingRequest]) {
        [self fulfillRequest:loadingRequest];
        if (loadingRequest.finished)
            return YES;
    }

    [self enqueueRequest:loadingRequest];
    return YES;
}

// AVAssetResourceLoaderDelegate: forget a request AVFoundation no longer wants.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);
    _requests.removeAll(loadingRequest);
}
@end
215
216 namespace WebCore {
217
218 #pragma mark - Static Methods
219
// The fixed custom-scheme URL given to AVURLAsset; every load for this URL is
// routed through WebCoreSharedBufferResourceLoaderDelegate instead of the
// network. Created once and kept for the process lifetime.
static NSURL *customSchemeURL()
{
    static NSURL *url = [] {
        return [[NSURL alloc] initWithString:@"custom-imagedecoderavfobjc://resource"];
    }();
    return url;
}
225
// Options for the decoding AVURLAsset: forbid references to external
// resources and disable persistent caching. The dictionary is retained once
// and intentionally lives for the process lifetime.
static NSDictionary *imageDecoderAssetOptions()
{
    static NSDictionary *options = [@{
        AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
        AVURLAssetUsesNoPersistentCacheKey: @YES,
    } retain];
    return options;
}
236
// A MediaSampleAVFObjC that can additionally cache the decoded CGImage for its
// frame and report whether the sample still merely references bytes in the
// original resource (rather than carrying its own data buffer).
class ImageDecoderAVFObjCSample : public MediaSampleAVFObjC {
public:
    static Ref<ImageDecoderAVFObjCSample> create(RetainPtr<CMSampleBufferRef>&& sampleBuffer)
    {
        return adoptRef(*new ImageDecoderAVFObjCSample(WTFMove(sampleBuffer)));
    }

    // Decoded image for this sample, or null if not (or no longer) decoded.
    CGImageRef image() const { return m_image.get(); }
    void setImage(RetainPtr<CGImageRef>&& image)
    {
        m_image = WTFMove(image);
        if (!m_image) {
            m_hasAlpha = false;
            return;
        }

        // Cache whether the decoded image actually carries alpha information.
        auto alphaInfo = CGImageGetAlphaInfo(m_image.get());
        m_hasAlpha = alphaInfo != kCGImageAlphaNone && alphaInfo != kCGImageAlphaNoneSkipLast && alphaInfo != kCGImageAlphaNoneSkipFirst;
    }

    struct ByteRange {
        size_t byteOffset { 0 };
        size_t byteLength { 0 };
    };

    // For "sample reference" buffers (no data or image buffer attached),
    // returns the byte range of the sample within the original resource.
    // Returns WTF::nullopt once the sample carries real data, or when the
    // reference attachments are missing or malformed.
    Optional<ByteRange> byteRange() const
    {
        if (PAL::CMSampleBufferGetDataBuffer(m_sample.get())
            || PAL::CMSampleBufferGetImageBuffer(m_sample.get())
            || !PAL::CMSampleBufferDataIsReady(m_sample.get()))
            return WTF::nullopt;

        CFNumberRef byteOffsetCF = (CFNumberRef)PAL::CMGetAttachment(m_sample.get(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset, nullptr);
        if (!byteOffsetCF || CFGetTypeID(byteOffsetCF) != CFNumberGetTypeID())
            return WTF::nullopt;

        int64_t byteOffset { 0 };
        if (!CFNumberGetValue(byteOffsetCF, kCFNumberSInt64Type, &byteOffset))
            return WTF::nullopt;

        // Only single-sample buffers are expected; first call queries the
        // size-array entry count, second fetches the single entry.
        CMItemCount sizeArrayEntries = 0;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, &sizeArrayEntries);
        if (sizeArrayEntries != 1)
            return WTF::nullopt;

        size_t singleSizeEntry;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 1, &singleSizeEntry, nullptr);
        return {{static_cast<size_t>(byteOffset), singleSizeEntry}};
    }

    // Base flags plus HasAlpha when the cached decoded image has alpha.
    SampleFlags flags() const override
    {
        return (SampleFlags)(MediaSampleAVFObjC::flags() | (m_hasAlpha ? HasAlpha : 0));
    }

private:
    ImageDecoderAVFObjCSample(RetainPtr<CMSampleBufferRef>&& sample)
        : MediaSampleAVFObjC(WTFMove(sample))
    {
    }

    RetainPtr<CGImageRef> m_image;
    bool m_hasAlpha { false }; // Meaningful only while m_image is non-null.
};
301
// Downcasts a presentation-order map entry to the concrete sample type this
// decoder stores (every sample added is an ImageDecoderAVFObjCSample).
static ImageDecoderAVFObjCSample* toSample(const PresentationOrderSampleMap::value_type& pair)
{
    return static_cast<ImageDecoderAVFObjCSample*>(pair.second.get());
}
306
// Iterator flavor of toSample(); works with decode-order and
// presentation-order map iterators alike.
template <typename Iterator>
ImageDecoderAVFObjCSample* toSample(Iterator iter)
{
    return static_cast<ImageDecoderAVFObjCSample*>(iter->second.get());
}
312
313 #pragma mark - ImageDecoderAVFObjC
314
// Factory for ImageDecoderAVFObjC. Returns nullptr when the soft-linked
// frameworks this decoder relies on are unavailable at runtime.
RefPtr<ImageDecoderAVFObjC> ImageDecoderAVFObjC::create(SharedBuffer& data, const String& mimeType, AlphaOption alphaOption, GammaAndColorProfileOption gammaAndColorProfileOption)
{
    // Both AVFoundation and the VideoToolbox CGImage-creation entry point are
    // soft-linked and may not be present.
    if (!AVAssetMIMETypeCache::singleton().isAvailable() || !canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
        return nullptr;

    return adoptRef(*new ImageDecoderAVFObjC(data, mimeType, alphaOption, gammaAndColorProfileOption));
}
326
// Constructs the decoder and kicks off an asynchronous load of the asset's
// tracks; once they arrive, the first enabled video track is selected on the
// main thread.
ImageDecoderAVFObjC::ImageDecoderAVFObjC(SharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
    : ImageDecoder()
    , m_mimeType(mimeType)
    , m_uti(WebCore::UTIFromMIMEType(mimeType))
    // The asset loads through customSchemeURL(), so every byte request is
    // served by m_loader from the in-memory data.
    , m_asset(adoptNS([PAL::allocAVURLAssetInstance() initWithURL:customSchemeURL() options:imageDecoderAssetOptions()]))
    , m_loader(adoptNS([[WebCoreSharedBufferResourceLoaderDelegate alloc] initWithParent:this]))
    , m_decompressionSession(WebCoreDecompressionSession::createRGB())
{
    // Seed the loader with whatever data exists now; more may arrive via setData().
    [m_loader updateData:data.createNSData().get() complete:NO];

    // Resource-loader callbacks are delivered on a global queue; the delegate
    // serializes access to its state with its own lock.
    [m_asset.get().resourceLoader setDelegate:m_loader.get() queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    [m_asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->setTrack(protectedThis->firstEnabledTrack());
        });
    }];
}
344
345 ImageDecoderAVFObjC::~ImageDecoderAVFObjC() = default;
346
// Only video media is supported, and only when AVFoundation is available.
bool ImageDecoderAVFObjC::supportsMediaType(MediaType type)
{
    if (type != MediaType::Video)
        return false;
    return AVAssetMIMETypeCache::singleton().isAvailable();
}
351
// Delegates the content-type decision to the shared AVAsset MIME type cache.
bool ImageDecoderAVFObjC::supportsContentType(const ContentType& type)
{
    auto& typeCache = AVAssetMIMETypeCache::singleton();
    return typeCache.supportsContentType(type);
}
356
// Delegates the MIME-type decision to the shared AVAsset MIME type cache.
bool ImageDecoderAVFObjC::canDecodeType(const String& mimeType)
{
    auto& typeCache = AVAssetMIMETypeCache::singleton();
    return typeCache.canDecodeType(mimeType);
}
361
// Returns the first enabled visual track of the asset, or nil when the asset
// has none (which is logged).
AVAssetTrack *ImageDecoderAVFObjC::firstEnabledTrack()
{
    for (AVAssetTrack *track in [m_asset tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]) {
        if (track.enabled)
            return track;
    }

    LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
    return nil;
}
376
// Enumerates the selected track with an AVAssetReader in sample-reference
// mode, recording every media-bearing sample. No-op once samples exist.
void ImageDecoderAVFObjC::readSamples()
{
    if (!m_sampleData.empty())
        return;

    auto assetReader = adoptNS([PAL::allocAVAssetReaderInstance() initWithAsset:m_asset.get() error:nil]);
    auto referenceOutput = adoptNS([PAL::allocAVAssetReaderSampleReferenceOutputInstance() initWithTrack:m_track.get()]);

    // Reference output yields buffers describing byte ranges in the resource
    // rather than carrying the media data itself.
    referenceOutput.get().alwaysCopiesSampleData = NO;
    [assetReader addOutput:referenceOutput.get()];
    [assetReader startReading];

    while (auto sampleBuffer = adoptCF([referenceOutput copyNextSampleBuffer])) {
        // NOTE: Some samples emitted by the AVAssetReader simply denote the boundary of edits
        // and do not carry media data.
        if (!(PAL::CMSampleBufferGetNumSamples(sampleBuffer.get())))
            continue;
        m_sampleData.addSample(ImageDecoderAVFObjCSample::create(WTFMove(sampleBuffer)).get());
    }

    // Having samples changes encodedDataStatus(); notify the client if it asked.
    if (m_encodedDataStatusChangedCallback)
        m_encodedDataStatusChangedCallback(encodedDataStatus());
}
400
// Computes the display size of the selected track and (re)creates an image
// rotation session when the combined asset/track preferred transform is
// non-identity.
void ImageDecoderAVFObjC::readTrackMetadata()
{
    AffineTransform finalTransform = CGAffineTransformConcat(m_asset.get().preferredTransform, m_track.get().preferredTransform);
    auto size = expandedIntSize(FloatSize(m_track.get().naturalSize));
    if (finalTransform.isIdentity()) {
        // No rotation needed; drop any stale rotation session.
        m_size = size;
        m_imageRotationSession = nullptr;
        return;
    }

    // Only rebuild the session when the transform or source size changed.
    if (!m_imageRotationSession
        || !m_imageRotationSession->transform()
        || m_imageRotationSession->transform().value() != finalTransform
        || m_imageRotationSession->size() != size)
        m_imageRotationSession = makeUnique<ImageRotationSessionVT>(WTFMove(finalTransform), size, kCVPixelFormatType_32BGRA, ImageRotationSessionVT::IsCGImageCompatible::Yes);

    // Rotation can swap width and height; report the rotated size.
    m_size = expandedIntSize(m_imageRotationSession->rotatedSize());
}
419
// Synchronously decodes |sampleBuffer|, applies rotation when configured,
// converts the pixel buffer to a CGImage, and attaches that image to the
// matching sample in m_sampleData. Returns false on decode or conversion
// failure.
bool ImageDecoderAVFObjC::storeSampleBuffer(CMSampleBufferRef sampleBuffer)
{
    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    if (!pixelBuffer) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
        return false;
    }

    auto presentationTime = PAL::toMediaTime(PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    auto iter = m_sampleData.presentationOrder().findSampleWithPresentationTime(presentationTime);

    if (m_imageRotationSession)
        pixelBuffer = m_imageRotationSession->rotate(pixelBuffer.get());

    CGImageRef rawImage = nullptr;
    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
        return false;
    }

    // The sample must exist: the decoded buffer originated from it.
    ASSERT(iter != m_sampleData.presentationOrder().end());
    toSample(iter)->setImage(adoptCF(rawImage));

    return true;
}
445
// Steps the decode cursor forward one sample, wrapping around to the first
// sample in decode order when it reaches (or already sits at) the end.
void ImageDecoderAVFObjC::advanceCursor()
{
    auto& decodeOrder = m_sampleData.decodeOrder();
    if (m_cursor == decodeOrder.end()) {
        m_cursor = decodeOrder.begin();
        return;
    }
    if (++m_cursor == decodeOrder.end())
        m_cursor = decodeOrder.begin();
}
451
// Switches decoding to |track|: resets every piece of state derived from the
// previous track, then asynchronously loads the new track's geometry before
// re-reading metadata and samples on the main thread.
void ImageDecoderAVFObjC::setTrack(AVAssetTrack *track)
{
    if (m_track == track)
        return;
    m_track = track;

    // Invalidate cached samples, size, cursor, and rotation state under the
    // same lock createFrameImageAtIndex() takes.
    LockHolder holder { m_sampleGeneratorLock };
    m_sampleData.clear();
    m_size.reset();
    m_cursor = m_sampleData.decodeOrder().end();
    m_imageRotationSession = nullptr;

    [track loadValuesAsynchronouslyForKeys:@[@"naturalSize", @"preferredTransform"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->readTrackMetadata();
            protectedThis->readSamples();
        });
    }];
}
471
// Registers the callback invoked whenever encodedDataStatus() may have
// changed (fired from readSamples()).
void ImageDecoderAVFObjC::setEncodedDataStatusChangeCallback(WTF::Function<void(EncodedDataStatus)>&& callback)
{
    m_encodedDataStatusChangedCallback = WTFMove(callback);
}
476
// Maps decoder progress onto EncodedDataStatus, most to least complete:
// samples read > size known > track selected > nothing yet.
EncodedDataStatus ImageDecoderAVFObjC::encodedDataStatus() const
{
    if (!m_sampleData.empty())
        return EncodedDataStatus::Complete;
    if (m_size)
        return EncodedDataStatus::SizeAvailable;
    if (m_track)
        return EncodedDataStatus::TypeAvailable;
    return EncodedDataStatus::Unknown;
}
487
// The (possibly rotated) display size, or an empty size while track metadata
// has not been read yet.
IntSize ImageDecoderAVFObjC::size() const
{
    return m_size.valueOr(IntSize());
}
494
// Number of media samples (frames) discovered by readSamples().
size_t ImageDecoderAVFObjC::frameCount() const
{
    return m_sampleData.size();
}
499
// In the absence of instructions to the contrary, assume all media formats repeat infinitely.
// FIXME: Future media formats may embed repeat count information, and when that is available
// through AVAsset, account for it here.
RepetitionCount ImageDecoderAVFObjC::repetitionCount() const
{
    if (frameCount() > 1)
        return RepetitionCountInfinite;
    return RepetitionCountNone;
}
507
// The UTI derived (at construction) from the MIME type; also served to
// AVFoundation as the resource's content type.
String ImageDecoderAVFObjC::uti() const
{
    return m_uti;
}
512
// Preferred filename extension for the decoder's MIME type.
String ImageDecoderAVFObjC::filenameExtension() const
{
    return MIMETypeRegistry::getPreferredExtensionForMIMEType(m_mimeType);
}
517
// Every frame shares the track's display size; the index and subsampling
// level are ignored.
IntSize ImageDecoderAVFObjC::frameSizeAtIndex(size_t, SubsamplingLevel) const
{
    return size();
}
522
// A frame is complete when its sample exists and all of the sample's encoded
// bytes are available (see sampleIsComplete()).
bool ImageDecoderAVFObjC::frameIsCompleteAtIndex(size_t index) const
{
    auto* sample = sampleAtIndex(index);
    return sample && sampleIsComplete(*sample);
}
531
// Rotation is baked in at decode time via ImageRotationSessionVT, so frames
// are always reported as unoriented.
ImageOrientation ImageDecoderAVFObjC::frameOrientationAtIndex(size_t) const
{
    return ImageOrientation::None;
}
536
// Duration of the frame at |index| in seconds; zero when no such sample.
Seconds ImageDecoderAVFObjC::frameDurationAtIndex(size_t index) const
{
    if (auto* sample = sampleAtIndex(index))
        return Seconds(sample->duration().toDouble());
    return { };
}
545
// Whether the frame's decoded image carries alpha. Alpha is known only after
// the frame has been decoded (see ImageDecoderAVFObjCSample::setImage).
bool ImageDecoderAVFObjC::frameHasAlphaAtIndex(size_t index) const
{
    if (auto* sample = sampleAtIndex(index))
        return sample->hasAlpha();
    return false;
}
551
// Subsampling is allowed for any valid frame index.
// NOTE(review): previously compared with `<=`, which also accepted the
// out-of-range index == size; valid indexes are [0, size), matching
// sampleAtIndex().
bool ImageDecoderAVFObjC::frameAllowSubsamplingAtIndex(size_t index) const
{
    return index < m_sampleData.size();
}
556
// Estimated decoded size of the frame in bytes: width * height * 4, matching
// the 32-bit BGRA output configured in readTrackMetadata(). Returns 0 for
// incomplete frames. Checked arithmetic (unsafeGet) guards against overflow.
unsigned ImageDecoderAVFObjC::frameBytesAtIndex(size_t index, SubsamplingLevel subsamplingLevel) const
{
    if (!frameIsCompleteAtIndex(index))
        return 0;

    IntSize frameSize = frameSizeAtIndex(index, subsamplingLevel);
    return (frameSize.area() * 4).unsafeGet();
}
565
// Decodes (if necessary) and returns the image for the frame at |index|.
// Samples are decoded in decode order starting from the nearest preceding
// sync sample; m_cursor tracks decode progress across calls so sequential
// requests keep decoding forward.
NativeImagePtr ImageDecoderAVFObjC::createFrameImageAtIndex(size_t index, SubsamplingLevel, const DecodingOptions&)
{
    LockHolder holder { m_sampleGeneratorLock };

    auto* sampleData = sampleAtIndex(index);
    if (!sampleData)
        return nullptr;

    // Fast path: the frame was decoded earlier and its image is still cached.
    if (auto image = sampleData->image())
        return image;

    if (m_cursor == m_sampleData.decodeOrder().end())
        m_cursor = m_sampleData.decodeOrder().begin();

    auto decodeTime = sampleData->decodeTime();

    if (decodeTime < m_cursor->second->decodeTime()) {
        // Rewind cursor to the last sync sample to begin decoding
        m_cursor = m_sampleData.decodeOrder().findSampleWithDecodeKey({decodeTime, sampleData->presentationTime()});
        do {
            if (m_cursor->second->isSync())
                break;
        } while (--m_cursor != m_sampleData.decodeOrder().begin());
    }

    RetainPtr<CGImageRef> image;
    while (true) {
        // If the cursor advanced past the requested sample without producing
        // its image, give up.
        if (decodeTime < m_cursor->second->decodeTime())
            return nullptr;

        auto cursorSample = toSample(m_cursor);
        if (!cursorSample)
            return nullptr;

        if (!sampleIsComplete(*cursorSample))
            return nullptr;

        if (auto byteRange = cursorSample->byteRange()) {
            // This is still a "reference" sample: attach a block buffer that
            // aliases the loader's NSData for the sample's byte range. The
            // custom block source retains the NSData and releases it when the
            // block buffer is destroyed (manual retain/release — this file is
            // not compiled with ARC, as the explicit retain/release shows).
            auto& byteRangeValue = byteRange.value();
            auto* data = m_loader.get().data;
            CMBlockBufferCustomBlockSource source {
                0,
                nullptr,
                [](void* refcon, void*, size_t) {
                    [(id)refcon release];
                },
                [data retain]
            };
            CMBlockBufferRef rawBlockBuffer = nullptr;
            if (noErr != PAL::CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, const_cast<void*>(data.bytes), data.length, nullptr, &source, byteRangeValue.byteOffset, byteRangeValue.byteLength, 0, &rawBlockBuffer))
                return nullptr;

            if (!rawBlockBuffer)
                return nullptr;

            if (noErr != PAL::CMSampleBufferSetDataBuffer(cursorSample->sampleBuffer(), rawBlockBuffer))
                return nullptr;
            // The sample buffer now retains the block buffer; drop ours.
            CFRelease(rawBlockBuffer);

            // The sample carries real data now, so remove the reference
            // attachments (byteRange() will return nullopt from here on).
            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset);
            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceURL);
        }

        auto cursorSampleBuffer = cursorSample->sampleBuffer();
        if (!cursorSampleBuffer)
            break;

        if (!storeSampleBuffer(cursorSampleBuffer))
            break;

        advanceCursor();
        // storeSampleBuffer() may have just produced the requested frame.
        if (auto image = sampleData->image())
            return image;
    }

    advanceCursor();
    return nullptr;
}
644
// Forwards the expected total resource size to the loader delegate so it can
// answer content-information requests before all data has arrived.
void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
{
    m_loader.get().expectedContentSize = expectedContentSize;
}
649
// Supplies (possibly partial) encoded data. Once all data has been received,
// track selection, metadata, and sample reading run synchronously so the
// decoder is immediately usable.
void ImageDecoderAVFObjC::setData(SharedBuffer& data, bool allDataReceived)
{
    [m_loader updateData:data.createNSData().get() complete:allDataReceived];

    if (allDataReceived) {
        m_isAllDataReceived = true;

        // The asynchronous "tracks" load started by the constructor may not
        // have completed; pick a track now if needed.
        if (!m_track)
            setTrack(firstEnabledTrack());

        if (!m_track)
            return;

        readTrackMetadata();
        readSamples();
    }
}
667
// Releases the decoded images of the first |index| + 1 frames in presentation
// order to reclaim memory; the encoded samples remain and can be re-decoded.
void ImageDecoderAVFObjC::clearFrameBufferCache(size_t index)
{
    size_t i = 0;
    for (auto& samplePair : m_sampleData.presentationOrder()) {
        toSample(samplePair)->setImage(nullptr);
        if (++i > index)
            break;
    }
}
677
// Returns the sample at the given presentation-order index, or nullptr when
// the index is out of range.
// FIXME: std::map is not random-accessible; this can get expensive if callers repeatedly call
// with monotonically increasing indexes. Investigate adding an O(1) side structure to make this
// style of access faster.
const ImageDecoderAVFObjCSample* ImageDecoderAVFObjC::sampleAtIndex(size_t index) const
{
    if (index >= m_sampleData.presentationOrder().size())
        return nullptr;

    auto iterator = m_sampleData.presentationOrder().begin();
    for (size_t remaining = index; remaining; --remaining)
        ++iterator;

    return toSample(iterator);
}
692
// A reference sample is complete once the loader holds all of its bytes; a
// sample that already carries a data buffer is complete when CoreMedia
// reports its data as ready.
bool ImageDecoderAVFObjC::sampleIsComplete(const ImageDecoderAVFObjCSample& sample) const
{
    if (auto byteRange = sample.byteRange()) {
        auto& byteRangeValue = byteRange.value();
        return byteRangeValue.byteOffset + byteRangeValue.byteLength <= m_loader.get().data.length;
    }

    return PAL::CMSampleBufferDataIsReady(sample.sampleBuffer());
}
702
703 }
704
705 #endif