Unreviewed, rolling out r244627.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / ImageDecoderAVFObjC.mm
1 /*
2  * Copyright (C) 2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23  * THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "ImageDecoderAVFObjC.h"
28
29 #if HAVE(AVASSETREADER)
30
31 #import "AVFoundationMIMETypeCache.h"
32 #import "AffineTransform.h"
33 #import "ContentType.h"
34 #import "FloatQuad.h"
35 #import "FloatRect.h"
36 #import "FloatSize.h"
37 #import "Logging.h"
38 #import "MIMETypeRegistry.h"
39 #import "MediaSampleAVFObjC.h"
40 #import "SharedBuffer.h"
41 #import "UTIUtilities.h"
42 #import "WebCoreDecompressionSession.h"
43 #import <AVFoundation/AVAsset.h>
44 #import <AVFoundation/AVAssetReader.h>
45 #import <AVFoundation/AVAssetReaderOutput.h>
46 #import <AVFoundation/AVAssetResourceLoader.h>
47 #import <AVFoundation/AVAssetTrack.h>
48 #import <AVFoundation/AVTime.h>
49 #import <VideoToolbox/VTUtilities.h>
50 #import <pal/avfoundation/MediaTimeAVFoundation.h>
51 #import <wtf/MainThread.h>
52 #import <wtf/MediaTime.h>
53 #import <wtf/NeverDestroyed.h>
54 #import <wtf/Optional.h>
55 #import <wtf/SoftLinking.h>
56 #import <wtf/Vector.h>
57
58 #import <pal/cf/CoreMediaSoftLink.h>
59 #import "CoreVideoSoftLink.h"
60 #import "VideoToolboxSoftLink.h"
61
62 #pragma mark - Soft Linking
63
// AVFoundation is soft-linked so WebCore can still load on systems where the
// framework (or an individual symbol) is unavailable; each class and constant
// below is resolved lazily on first use, and the MAY_FAIL constants must be
// checked with canLoad…() before use.
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAssetReader)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAssetReaderSampleReferenceOutput)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
#define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
74
75 #pragma mark -
76
// Serves AVAssetResourceLoader requests for the decoder's custom-scheme URL
// out of an in-memory NSData buffer that grows as bytes arrive from the
// network. Requests that cannot be satisfied yet are queued and retried
// whenever new data (or the expected content size) becomes available.
// All mutable state is guarded by _dataLock; resource-loader callbacks arrive
// on a global dispatch queue (see the ImageDecoderAVFObjC constructor).
@interface WebCoreSharedBufferResourceLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WebCore::ImageDecoderAVFObjC* _parent; // Raw back-pointer; presumably outlives the delegate — TODO confirm ownership.
    long long _expectedContentSize; // Total resource size, when known before all data has arrived.
    RetainPtr<NSData> _data; // Bytes received so far.
    bool _complete; // True once every byte of the resource has been received.
    Vector<RetainPtr<AVAssetResourceLoadingRequest>> _requests; // Requests waiting for more data.
    Lock _dataLock; // Guards all of the above.
}
@property (readonly) NSData* data;
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent;
- (void)setExpectedContentSize:(long long)expectedContentSize;
- (void)updateData:(NSData *)data complete:(BOOL)complete;
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)fulfillPendingRequests;
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
94
@implementation WebCoreSharedBufferResourceLoaderDelegate
- (id)initWithParent:(WebCore::ImageDecoderAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;
    _parent = parent;

    return self;
}

- (NSData*)data
{
    return _data.get();
}

// Records the expected total resource size (learned from the network
// response) and retries queued requests, which may now be fulfillable.
- (void)setExpectedContentSize:(long long)expectedContentSize
{
    LockHolder holder { _dataLock };
    _expectedContentSize = expectedContentSize;

    [self fulfillPendingRequests];
}

// Replaces the buffered data with the bytes received so far and retries any
// queued requests.
- (void)updateData:(NSData *)data complete:(BOOL)complete
{
    LockHolder holder { _dataLock };
    _data = data;
    _complete = complete;

    [self fulfillPendingRequests];
}

// Returns whether the request can make progress right now: it must still be
// live, the content size must be known (or all data received), and the
// requested offset must fall within the buffered bytes.
// Caller must hold _dataLock.
- (BOOL)canFulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (!request)
        return NO;

    if (request.finished || request.cancelled)
        return NO;

    // AVURLAsset's resource loader requires knowing the expected content size
    // to load successfully. That requires either having the complete data for
    // the resource, or knowing the expected content size.
    if (!_complete && !_expectedContentSize)
        return NO;

    if (auto dataRequest = request.dataRequest) {
        // No bytes buffered at the requested offset yet.
        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
            return NO;
    }

    return YES;
}

// Queues a request to be retried when more data or the content size arrives.
// Caller must hold _dataLock.
- (void)enqueueRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    ASSERT(!_requests.contains(loadingRequest));
    _requests.append(loadingRequest);
}

// Attempts every queued request, then drops the ones that finished.
// Caller must hold _dataLock.
- (void)fulfillPendingRequests
{
    for (auto& request : _requests) {
        if ([self canFulfillRequest:request.get()])
            [self fulfillRequest:request.get()];
    }

    _requests.removeAllMatching([] (auto& request) {
        return request.get().finished;
    });
}

// Answers the request's content-information query and responds with as many
// of the requested bytes as are currently buffered; finishes the request only
// once it has been fully satisfied. Caller must hold _dataLock.
- (void)fulfillRequest:(AVAssetResourceLoadingRequest *)request
{
    if (auto infoRequest = request.contentInformationRequest) {
        infoRequest.contentType = _parent->uti();
        infoRequest.byteRangeAccessSupported = YES;
        infoRequest.contentLength = _complete ? _data.get().length : _expectedContentSize;
    }

    if (auto dataRequest = request.dataRequest) {
        long long availableLength = _data.get().length - dataRequest.requestedOffset;
        if (availableLength <= 0)
            return;

        long long requestedLength;
        if (dataRequest.requestsAllDataToEndOfResource)
            requestedLength = availableLength;
        else
            requestedLength = std::min<long long>(availableLength, dataRequest.requestedLength);

        auto range = NSMakeRange(static_cast<NSUInteger>(dataRequest.requestedOffset), static_cast<NSUInteger>(requestedLength));
        NSData* requestedData = [_data subdataWithRange:range];
        if (!requestedData)
            return;

        [dataRequest respondWithData:requestedData];

        // Leave the request queued if we could not yet deliver everything it
        // asked for; it will be retried as more data arrives.
        if (dataRequest.requestsAllDataToEndOfResource) {
            if (!_complete)
                return;
        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
            return;
    }

    [request finishLoading];
}

// AVAssetResourceLoaderDelegate: invoked on the loader's dispatch queue.
// Fulfills the request immediately when possible; otherwise queues it for
// later. Always returns YES so AVFoundation waits rather than failing.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);

    if ([self canFulfillRequest:loadingRequest]) {
        [self fulfillRequest:loadingRequest];
        if (loadingRequest.finished)
            return YES;
    }

    [self enqueueRequest:loadingRequest];
    return YES;
}

// AVAssetResourceLoaderDelegate: drop a cancelled request from the queue.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    LockHolder holder { _dataLock };

    UNUSED_PARAM(resourceLoader);
    _requests.removeAll(loadingRequest);
}
@end
227
228 namespace WebCore {
229
230 #pragma mark - Static Methods
231
// Placeholder URL handed to the AVURLAsset; the actual bytes are vended by
// WebCoreSharedBufferResourceLoaderDelegate through the asset's resource
// loader, so the URL's content never resolves on its own.
static NSURL *customSchemeURL()
{
    static NSURL *url;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        url = [[NSURL alloc] initWithString:@"custom-imagedecoderavfobjc://resource"];
    });
    return url;
}
237
// Options for the AVURLAsset: forbid references to any external resources and
// disable the persistent cache. Falls back to an empty dictionary when the
// soft-linked option keys are unavailable.
static NSDictionary *imageDecoderAssetOptions()
{
    static NSDictionary *options;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // FIXME: Are these keys really optional?
        if (canLoadAVURLAssetReferenceRestrictionsKey() && canLoadAVURLAssetUsesNoPersistentCacheKey()) {
            options = [@{
                AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
                AVURLAssetUsesNoPersistentCacheKey: @YES,
            } retain];
        } else
            options = [@{ } retain];
    });
    return options;
}
251
// Decomposes an affine transform into the subset of operations the decoder
// can apply while rendering: flips about either axis plus a rotation in
// multiples of 90°. Transforms outside that subset yield default (identity)
// rotation properties.
static ImageDecoderAVFObjC::RotationProperties transformToRotationProperties(AffineTransform inTransform)
{
    ImageDecoderAVFObjC::RotationProperties rotation;
    if (inTransform.isIdentity())
        return rotation;

    AffineTransform::DecomposedType decomposed { };
    if (!inTransform.decompose(decomposed))
        return rotation;

    // A scale of -1 on one axis is a mirror flip about the other axis.
    rotation.flipY = WTF::areEssentiallyEqual(decomposed.scaleX, -1.);
    rotation.flipX = WTF::areEssentiallyEqual(decomposed.scaleY, -1.);
    auto degrees = rad2deg(decomposed.angle);
    // Normalize into [0, 360).
    while (degrees < 0)
        degrees += 360;

    // Only support rotation in multiples of 90°:
    if (WTF::areEssentiallyEqual(fmod(degrees, 90.), 0.))
        rotation.angle = clampToUnsigned(degrees);

    return rotation;
}
274
// A MediaSampleAVFObjC subclass that can additionally carry the decoded
// CGImage for the sample and remember whether that image has an alpha channel.
class ImageDecoderAVFObjCSample : public MediaSampleAVFObjC {
public:
    static Ref<ImageDecoderAVFObjCSample> create(RetainPtr<CMSampleBufferRef>&& sampleBuffer)
    {
        return adoptRef(*new ImageDecoderAVFObjCSample(WTFMove(sampleBuffer)));
    }

    CGImageRef image() const { return m_image.get(); }

    // Stores (or clears, when passed null) the decoded image and caches
    // whether its pixel format carries meaningful alpha.
    void setImage(RetainPtr<CGImageRef>&& image)
    {
        m_image = WTFMove(image);
        if (!m_image) {
            m_hasAlpha = false;
            return;
        }

        auto alphaInfo = CGImageGetAlphaInfo(m_image.get());
        m_hasAlpha = alphaInfo != kCGImageAlphaNone && alphaInfo != kCGImageAlphaNoneSkipLast && alphaInfo != kCGImageAlphaNoneSkipFirst;
    }

    // Location of this sample's encoded bytes within the original resource.
    struct ByteRange {
        size_t byteOffset { 0 };
        size_t byteLength { 0 };
    };

    // For a sample-reference buffer — one that carries neither a data buffer
    // nor an image buffer of its own yet — returns the byte range of the
    // sample's data within the source resource, or nullopt when the range
    // cannot be determined (e.g. data already attached, or no/invalid
    // byte-offset attachment).
    Optional<ByteRange> byteRange() const
    {
        if (PAL::CMSampleBufferGetDataBuffer(m_sample.get())
            || PAL::CMSampleBufferGetImageBuffer(m_sample.get())
            || !PAL::CMSampleBufferDataIsReady(m_sample.get()))
            return WTF::nullopt;

        CFNumberRef byteOffsetCF = (CFNumberRef)PAL::CMGetAttachment(m_sample.get(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset, nullptr);
        if (!byteOffsetCF || CFGetTypeID(byteOffsetCF) != CFNumberGetTypeID())
            return WTF::nullopt;

        int64_t byteOffset { 0 };
        if (!CFNumberGetValue(byteOffsetCF, kCFNumberSInt64Type, &byteOffset))
            return WTF::nullopt;

        // Only single-sample buffers are supported: first query how many
        // entries the size array has, then fetch the lone entry.
        CMItemCount sizeArrayEntries = 0;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, &sizeArrayEntries);
        if (sizeArrayEntries != 1)
            return WTF::nullopt;

        size_t singleSizeEntry;
        PAL::CMSampleBufferGetSampleSizeArray(m_sample.get(), 1, &singleSizeEntry, nullptr);
        return {{static_cast<size_t>(byteOffset), singleSizeEntry}};
    }

    // Adds HasAlpha to the base flags when the decoded image carries alpha.
    SampleFlags flags() const override
    {
        return (SampleFlags)(MediaSampleAVFObjC::flags() | (m_hasAlpha ? HasAlpha : 0));
    }

private:
    ImageDecoderAVFObjCSample(RetainPtr<CMSampleBufferRef>&& sample)
        : MediaSampleAVFObjC(WTFMove(sample))
    {
    }

    RetainPtr<CGImageRef> m_image; // Decoded frame, once decoding has happened.
    bool m_hasAlpha { false }; // Cached alpha status of m_image.
};
339
340 static ImageDecoderAVFObjCSample* toSample(const PresentationOrderSampleMap::value_type& pair)
341 {
342     return (ImageDecoderAVFObjCSample*)pair.second.get();
343 }
344
345 template <typename Iterator>
346 ImageDecoderAVFObjCSample* toSample(Iterator iter)
347 {
348     return (ImageDecoderAVFObjCSample*)iter->second.get();
349 }
350
351 #pragma mark - ImageDecoderAVFObjC
352
353 RefPtr<ImageDecoderAVFObjC> ImageDecoderAVFObjC::create(SharedBuffer& data, const String& mimeType, AlphaOption alphaOption, GammaAndColorProfileOption gammaAndColorProfileOption)
354 {
355     // AVFoundation may not be available at runtime.
356     if (!AVFoundationMIMETypeCache::singleton().isAvailable())
357         return nullptr;
358
359     if (!canLoad_VideoToolbox_VTCreateCGImageFromCVPixelBuffer())
360         return nullptr;
361
362     return adoptRef(*new ImageDecoderAVFObjC(data, mimeType, alphaOption, gammaAndColorProfileOption));
363 }
364
// Builds the AVURLAsset around the custom-scheme URL, wires the shared-buffer
// resource loader delegate (serving on a global dispatch queue), seeds it with
// the data received so far, and starts asynchronous loading of the asset's
// tracks; once available, the first enabled track is selected on the main
// thread. AlphaOption and GammaAndColorProfileOption are currently unused.
ImageDecoderAVFObjC::ImageDecoderAVFObjC(SharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
    : ImageDecoder()
    , m_mimeType(mimeType)
    , m_uti(WebCore::UTIFromMIMEType(mimeType))
    , m_asset(adoptNS([allocAVURLAssetInstance() initWithURL:customSchemeURL() options:imageDecoderAssetOptions()]))
    , m_loader(adoptNS([[WebCoreSharedBufferResourceLoaderDelegate alloc] initWithParent:this]))
    , m_decompressionSession(WebCoreDecompressionSession::createRGB())
{
    [m_loader updateData:data.createNSData().get() complete:NO];

    [m_asset.get().resourceLoader setDelegate:m_loader.get() queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
    [m_asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->setTrack(protectedThis->firstEnabledTrack());
        });
    }];
}
382
383 ImageDecoderAVFObjC::~ImageDecoderAVFObjC() = default;
384
385 bool ImageDecoderAVFObjC::supportsMediaType(MediaType type)
386 {
387     return type == MediaType::Video && AVFoundationMIMETypeCache::singleton().isAvailable();
388 }
389
// Delegates content-type support to AVFoundation's MIME type cache.
bool ImageDecoderAVFObjC::supportsContentType(const ContentType& type)
{
    return AVFoundationMIMETypeCache::singleton().supportsContentType(type);
}
394
// Delegates MIME-type decodability to AVFoundation's MIME type cache.
bool ImageDecoderAVFObjC::canDecodeType(const String& mimeType)
{
    return AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType);
}
399
// Returns the asset's first enabled visual track, or nil when none exists or
// the soft-linked AVMediaCharacteristicVisual constant is unavailable.
AVAssetTrack *ImageDecoderAVFObjC::firstEnabledTrack()
{
    // FIXME: Is AVMediaCharacteristicVisual truly optional?
    if (!canLoadAVMediaCharacteristicVisual()) {
        LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - AVMediaCharacteristicVisual is not supported", this);
        return nil;
    }

    for (AVAssetTrack *track in [m_asset tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]) {
        if (track.enabled)
            return track;
    }

    LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
    return nil;
}
420
// Reads every sample reference from the current track into m_sampleData via
// an AVAssetReader, then notifies the encoded-data-status callback. No-op if
// samples were already read.
void ImageDecoderAVFObjC::readSamples()
{
    if (!m_sampleData.empty())
        return;

    auto assetReader = adoptNS([allocAVAssetReaderInstance() initWithAsset:m_asset.get() error:nil]);
    auto referenceOutput = adoptNS([allocAVAssetReaderSampleReferenceOutputInstance() initWithTrack:m_track.get()]);

    // Collect sample *references* only; the actual bytes are attached lazily
    // at decode time (see createFrameImageAtIndex()).
    referenceOutput.get().alwaysCopiesSampleData = NO;
    [assetReader addOutput:referenceOutput.get()];
    [assetReader startReading];

    while (auto sampleBuffer = adoptCF([referenceOutput copyNextSampleBuffer])) {
        // NOTE: Some samples emitted by the AVAssetReader simply denote the boundary of edits
        // and do not carry media data.
        if (!(PAL::CMSampleBufferGetNumSamples(sampleBuffer.get())))
            continue;
        m_sampleData.addSample(ImageDecoderAVFObjCSample::create(WTFMove(sampleBuffer)).get());
    }

    if (m_encodedDataStatusChangedCallback)
        m_encodedDataStatusChangedCallback(encodedDataStatus());
}
444
// Caches the track's rotation — derived from the combined asset and track
// preferred transforms — and its natural size, transposing the size when the
// rotation is 90° or 270°. Each value is computed only once per track.
void ImageDecoderAVFObjC::readTrackMetadata()
{
    if (!m_rotation)
        m_rotation = transformToRotationProperties(CGAffineTransformConcat(m_asset.get().preferredTransform, m_track.get().preferredTransform));

    if (!m_size) {
        auto size = FloatSize(m_track.get().naturalSize);
        auto angle = m_rotation.value().angle;
        if (angle == 90 || angle == 270)
            size = size.transposedSize();

        m_size = expandedIntSize(size);
    }
}
459
// Synchronously decodes the given sample buffer, applies any rotation/flip
// the track requires (lazily creating the VTImageRotationSession and the
// pixel-buffer pool it draws from), converts the result to a CGImage, and
// stores the image on the matching sample in m_sampleData.
// Returns false when decoding or image creation fails.
bool ImageDecoderAVFObjC::storeSampleBuffer(CMSampleBufferRef sampleBuffer)
{
    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    if (!pixelBuffer) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
        return false;
    }

    auto presentationTime = PAL::toMediaTime(PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    auto iter = m_sampleData.presentationOrder().findSampleWithPresentationTime(presentationTime);

    if (m_rotation && !m_rotation.value().isIdentity()) {
        auto& rotation = m_rotation.value();
        if (!m_rotationSession) {
            VTImageRotationSessionRef rawRotationSession = nullptr;
            VTImageRotationSessionCreate(kCFAllocatorDefault, rotation.angle, &rawRotationSession);
            m_rotationSession = rawRotationSession;
            VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_EnableHighSpeedTransfer, kCFBooleanTrue);

            if (rotation.flipY)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipVerticalOrientation, kCFBooleanTrue);
            if (rotation.flipX)
                VTImageRotationSessionSetProperty(m_rotationSession.get(), kVTImageRotationPropertyKey_FlipHorizontalOrientation, kCFBooleanTrue);
        }

        if (!m_rotationPool) {
            auto pixelAttributes = @{
                (__bridge NSString *)kCVPixelBufferWidthKey: @(m_size.value().width()),
                (__bridge NSString *)kCVPixelBufferHeightKey: @(m_size.value().height()),
                (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
                (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
            };
            CVPixelBufferPoolRef rawPool = nullptr;
            CVPixelBufferPoolCreate(kCFAllocatorDefault, nullptr, (__bridge CFDictionaryRef)pixelAttributes, &rawPool);
            m_rotationPool = adoptCF(rawPool);
        }

        CVPixelBufferRef rawRotatedBuffer = nullptr;
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_rotationPool.get(), &rawRotatedBuffer);
        // Adopt the +1 reference immediately so the buffer is released even if
        // the transfer below fails; previously a failed transfer leaked it.
        auto rotatedBuffer = adoptCF(rawRotatedBuffer);
        auto status = VTImageRotationSessionTransferImage(m_rotationSession.get(), pixelBuffer.get(), rotatedBuffer.get());
        if (status == noErr)
            pixelBuffer = WTFMove(rotatedBuffer);
    }

    CGImageRef rawImage = nullptr;
    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
        return false;
    }

    ASSERT(iter != m_sampleData.presentationOrder().end());
    toSample(iter)->setImage(adoptCF(rawImage));

    return true;
}
515
516 void ImageDecoderAVFObjC::advanceCursor()
517 {
518     if (m_cursor == m_sampleData.decodeOrder().end() || ++m_cursor == m_sampleData.decodeOrder().end())
519         m_cursor = m_sampleData.decodeOrder().begin();
520 }
521
// Switches decoding to the given track: resets all per-track state (samples,
// size, rotation, decode cursor, rotation session) under the generator lock,
// then asynchronously loads the track's size/transform metadata and reads its
// samples on the main thread. No-op when the track is unchanged.
void ImageDecoderAVFObjC::setTrack(AVAssetTrack *track)
{
    if (m_track == track)
        return;
    m_track = track;

    LockHolder holder { m_sampleGeneratorLock };
    m_sampleData.clear();
    m_size.reset();
    m_rotation.reset();
    m_cursor = m_sampleData.decodeOrder().end();
    m_rotationSession = nullptr;

    [track loadValuesAsynchronouslyForKeys:@[@"naturalSize", @"preferredTransform"] completionHandler:[protectedThis = makeRefPtr(this)] () mutable {
        callOnMainThread([protectedThis = WTFMove(protectedThis)] {
            protectedThis->readTrackMetadata();
            protectedThis->readSamples();
        });
    }];
}
542
// Registers the callback invoked whenever the encoded-data status may have
// changed (e.g. after readSamples() completes).
void ImageDecoderAVFObjC::setEncodedDataStatusChangeCallback(WTF::Function<void(EncodedDataStatus)>&& callback)
{
    m_encodedDataStatusChangedCallback = WTFMove(callback);
}
547
548 EncodedDataStatus ImageDecoderAVFObjC::encodedDataStatus() const
549 {
550     if (!m_sampleData.empty())
551         return EncodedDataStatus::Complete;
552     if (m_size)
553         return EncodedDataStatus::SizeAvailable;
554     if (m_track)
555         return EncodedDataStatus::TypeAvailable;
556     return EncodedDataStatus::Unknown;
557 }
558
559 IntSize ImageDecoderAVFObjC::size() const
560 {
561     if (m_size)
562         return m_size.value();
563     return IntSize();
564 }
565
// Number of frames (samples) read from the track so far.
size_t ImageDecoderAVFObjC::frameCount() const
{
    return m_sampleData.size();
}
570
// Multi-frame media loops forever; a single frame does not repeat.
RepetitionCount ImageDecoderAVFObjC::repetitionCount() const
{
    // In the absence of instructions to the contrary, assume all media formats repeat infinitely.
    // FIXME: Future media formats may embed repeat count information, and when that is available
    // through AVAsset, account for it here.
    return frameCount() > 1 ? RepetitionCountInfinite : RepetitionCountNone;
}
578
// Uniform Type Identifier derived from the MIME type at construction time.
String ImageDecoderAVFObjC::uti() const
{
    return m_uti;
}
583
// Preferred filename extension for the decoder's MIME type.
String ImageDecoderAVFObjC::filenameExtension() const
{
    return MIMETypeRegistry::getPreferredExtensionForMIMEType(m_mimeType);
}
588
// Every frame shares the track's (rotated) natural size; index and
// subsampling level are ignored.
IntSize ImageDecoderAVFObjC::frameSizeAtIndex(size_t, SubsamplingLevel) const
{
    return size();
}
593
594 bool ImageDecoderAVFObjC::frameIsCompleteAtIndex(size_t index) const
595 {
596     auto* sampleData = sampleAtIndex(index);
597     if (!sampleData)
598         return false;
599
600     return sampleIsComplete(*sampleData);
601 }
602
// Orientation is baked into the pixels via the rotation session, so every
// frame reports the default orientation.
ImageOrientation ImageDecoderAVFObjC::frameOrientationAtIndex(size_t) const
{
    return ImageOrientation();
}
607
608 Seconds ImageDecoderAVFObjC::frameDurationAtIndex(size_t index) const
609 {
610     auto* sampleData = sampleAtIndex(index);
611     if (!sampleData)
612         return { };
613
614     return Seconds(sampleData->duration().toDouble());
615 }
616
617 bool ImageDecoderAVFObjC::frameHasAlphaAtIndex(size_t index) const
618 {
619     auto* sampleData = sampleAtIndex(index);
620     return sampleData ? sampleData->hasAlpha() : false;
621 }
622
623 bool ImageDecoderAVFObjC::frameAllowSubsamplingAtIndex(size_t index) const
624 {
625     return index <= m_sampleData.size();
626 }
627
628 unsigned ImageDecoderAVFObjC::frameBytesAtIndex(size_t index, SubsamplingLevel subsamplingLevel) const
629 {
630     if (!frameIsCompleteAtIndex(index))
631         return 0;
632
633     IntSize frameSize = frameSizeAtIndex(index, subsamplingLevel);
634     return (frameSize.area() * 4).unsafeGet();
635 }
636
// Decodes and returns the frame at the given presentation-order index,
// returning the cached image when the frame was decoded earlier. Otherwise
// advances the decode-order cursor (rewinding to the preceding sync sample
// when the target precedes it), attaching each reference sample's bytes from
// the loader's buffer before handing it to storeSampleBuffer(), until the
// requested frame's image is produced. Returns nullptr when the frame's data
// is not yet available or decoding fails.
NativeImagePtr ImageDecoderAVFObjC::createFrameImageAtIndex(size_t index, SubsamplingLevel, const DecodingOptions&)
{
    LockHolder holder { m_sampleGeneratorLock };

    auto* sampleData = sampleAtIndex(index);
    if (!sampleData)
        return nullptr;

    if (auto image = sampleData->image())
        return image;

    if (m_cursor == m_sampleData.decodeOrder().end())
        m_cursor = m_sampleData.decodeOrder().begin();

    auto decodeTime = sampleData->decodeTime();

    if (decodeTime < m_cursor->second->decodeTime()) {
        // Rewind cursor to the last sync sample to begin decoding
        m_cursor = m_sampleData.decodeOrder().findSampleWithDecodeKey({decodeTime, sampleData->presentationTime()});
        do {
            if (m_cursor->second->isSync())
                break;
        } while (--m_cursor != m_sampleData.decodeOrder().begin());
    }

    while (true) {
        if (decodeTime < m_cursor->second->decodeTime())
            return nullptr;

        auto cursorSample = toSample(m_cursor);
        if (!cursorSample)
            return nullptr;

        if (!sampleIsComplete(*cursorSample))
            return nullptr;

        if (auto byteRange = cursorSample->byteRange()) {
            // The sample is still a reference; wrap the relevant range of the
            // loader's NSData in a block buffer and attach it to the sample.
            auto& byteRangeValue = byteRange.value();
            auto* data = m_loader.get().data;
            // The custom block source retains the NSData for the lifetime of
            // the block buffer and releases it when the buffer is destroyed.
            CMBlockBufferCustomBlockSource source {
                0,
                nullptr,
                [](void* refcon, void*, size_t) {
                    [(id)refcon release];
                },
                [data retain]
            };
            CMBlockBufferRef rawBlockBuffer = nullptr;
            if (noErr != PAL::CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, const_cast<void*>(data.bytes), data.length, nullptr, &source, byteRangeValue.byteOffset, byteRangeValue.byteLength, 0, &rawBlockBuffer))
                return nullptr;

            if (!rawBlockBuffer)
                return nullptr;

            // Adopt the +1 reference right away so the block buffer is
            // released on every path; previously a failing
            // CMSampleBufferSetDataBuffer() leaked it.
            auto blockBuffer = adoptCF(rawBlockBuffer);

            if (noErr != PAL::CMSampleBufferSetDataBuffer(cursorSample->sampleBuffer(), blockBuffer.get()))
                return nullptr;

            // The sample now owns real data; drop the reference attachments.
            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceByteOffset);
            PAL::CMRemoveAttachment(cursorSample->sampleBuffer(), PAL::kCMSampleBufferAttachmentKey_SampleReferenceURL);
        }

        auto cursorSampleBuffer = cursorSample->sampleBuffer();
        if (!cursorSampleBuffer)
            break;

        if (!storeSampleBuffer(cursorSampleBuffer))
            break;

        advanceCursor();
        if (auto image = sampleData->image())
            return image;
    }

    advanceCursor();
    return nullptr;
}
715
// Forwards the expected total resource size to the resource loader delegate,
// which may unblock pending AVAssetResourceLoader requests.
void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
{
    m_loader.get().expectedContentSize = expectedContentSize;
}
720
// Feeds newly received bytes to the resource loader delegate. Once all data
// has arrived, selects a track if none was chosen yet and synchronously reads
// its metadata and samples.
void ImageDecoderAVFObjC::setData(SharedBuffer& data, bool allDataReceived)
{
    [m_loader updateData:data.createNSData().get() complete:allDataReceived];

    if (allDataReceived) {
        m_isAllDataReceived = true;

        if (!m_track)
            setTrack(firstEnabledTrack());

        if (!m_track)
            return;

        readTrackMetadata();
        readSamples();
    }
}
738
739 void ImageDecoderAVFObjC::clearFrameBufferCache(size_t index)
740 {
741     size_t i = 0;
742     for (auto& samplePair : m_sampleData.presentationOrder()) {
743         toSample(samplePair)->setImage(nullptr);
744         if (++i > index)
745             break;
746     }
747 }
748
749 const ImageDecoderAVFObjCSample* ImageDecoderAVFObjC::sampleAtIndex(size_t index) const
750 {
751     if (index >= m_sampleData.presentationOrder().size())
752         return nullptr;
753
754     // FIXME: std::map is not random-accessible; this can get expensive if callers repeatedly call
755     // with monotonically increasing indexes. Investigate adding an O(1) side structure to make this
756     // style of access faster.
757     auto iter = m_sampleData.presentationOrder().begin();
758     for (size_t i = 0; i != index; ++i)
759         ++iter;
760
761     return toSample(iter);
762 }
763
764 bool ImageDecoderAVFObjC::sampleIsComplete(const ImageDecoderAVFObjCSample& sample) const
765 {
766     if (auto byteRange = sample.byteRange()) {
767         auto& byteRangeValue = byteRange.value();
768         return byteRangeValue.byteOffset + byteRangeValue.byteLength <= m_loader.get().data.length;
769     }
770
771     return PAL::CMSampleBufferDataIsReady(sample.sampleBuffer());
772 }
773
774 }
775
776 #endif