Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm
/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "WebCoreDecompressionSession.h"

#if USE(VIDEOTOOLBOX)

#import "Logging.h"
#import "PixelBufferConformerCV.h"
#import <CoreMedia/CMBufferQueue.h>
#import <CoreMedia/CMFormatDescription.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <wtf/CurrentTime.h>
#import <wtf/MainThread.h>
#import <wtf/MediaTime.h>
#import <wtf/StringPrintStream.h>
#import <wtf/Vector.h>
#import <wtf/cf/TypeCastsCF.h>

#import <pal/cf/CoreMediaSoftLink.h>
#import "CoreVideoSoftLink.h"
#import "VideoToolboxSoftLink.h"

using namespace PAL;

WTF_DECLARE_CF_TYPE_TRAIT(CMSampleBuffer);

namespace WebCore {

WebCoreDecompressionSession::WebCoreDecompressionSession(Mode mode)
    : m_mode(mode)
    , m_decompressionQueue(adoptOSObject(dispatch_queue_create("WebCoreDecompressionSession Decompression Queue", DISPATCH_QUEUE_SERIAL)))
    , m_enqueingQueue(adoptOSObject(dispatch_queue_create("WebCoreDecompressionSession Enqueueing Queue", DISPATCH_QUEUE_SERIAL)))
    , m_hasAvailableImageSemaphore(adoptOSObject(dispatch_semaphore_create(0)))
{
}

void WebCoreDecompressionSession::invalidate()
{
    m_invalidated = true;
    m_notificationCallback = nullptr;
    m_hasAvailableFrameCallback = nullptr;
    setTimebase(nullptr);
    if (m_timerSource)
        dispatch_source_cancel(m_timerSource.get());
}
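// Associates the session with a playback timebase. The timer dispatch source that drives
// automaticDequeue() is created lazily on the main queue, detached from the old timebase,
// and attached to the new one.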
void WebCoreDecompressionSession::setTimebase(CMTimebaseRef timebase)
{
    if (m_timebase == timebase)
        return;

    if (m_timebase)
        CMTimebaseRemoveTimerDispatchSource(m_timebase.get(), m_timerSource.get());

    m_timebase = timebase;

    if (m_timebase) {
        if (!m_timerSource) {
            m_timerSource = adoptOSObject(dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue()));
            dispatch_source_set_event_handler(m_timerSource.get(), [this] {
                automaticDequeue();
            });
            dispatch_activate(m_timerSource.get());
        }
        CMTimebaseAddTimerDispatchSource(m_timebase.get(), m_timerSource.get());
    }
}
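// Invokes the client's requestMediaDataWhenReady() callback on the main thread once the
// session can accept more samples.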
void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData()
{
    if (!isReadyForMoreMediaData() || !m_notificationCallback)
        return;

    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);

    if (isMainThread()) {
        m_notificationCallback();
        return;
    }

    RefPtr<WebCoreDecompressionSession> protectedThis { this };
    dispatch_async(dispatch_get_main_queue(), [protectedThis] {
        if (protectedThis->m_notificationCallback)
            protectedThis->m_notificationCallback();
    });
}
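// Accepts a compressed sample from the client. Lazily creates the producer and consumer
// buffer queues, then hands the sample to the decompression queue for asynchronous decoding.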
void WebCoreDecompressionSession::enqueueSample(CMSampleBufferRef sampleBuffer, bool displaying)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, itemCount, timingInfoArray.data(), nullptr))
        return;

    if (!m_decompressionQueue)
        m_decompressionQueue = adoptOSObject(dispatch_queue_create("SourceBufferPrivateAVFObjC Decompression Queue", DISPATCH_QUEUE_SERIAL));

    // CMBufferCallbacks contains 64-bit pointers that aren't 8-byte aligned. To suppress the linker
    // warning about this, we prepend 4 bytes of padding when building for macOS.
#if PLATFORM(MAC)
    const size_t padSize = 4;
#else
    const size_t padSize = 0;
#endif

    if (!m_producerQueue) {
        CMBufferQueueRef outQueue { nullptr };
#pragma pack(push, 4)
        struct { uint8_t pad[padSize]; CMBufferCallbacks callbacks; } callbacks { { }, {
            0,
            nullptr,
            &getDecodeTime,
            &getPresentationTime,
            &getDuration,
            nullptr,
            &compareBuffers,
            nullptr,
            nullptr,
        } };
#pragma pack(pop)
        CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
        m_producerQueue = adoptCF(outQueue);
    }

    if (!m_consumerQueue) {
        CMBufferQueueRef outQueue { nullptr };
#pragma pack(push, 4)
        struct { uint8_t pad[padSize]; CMBufferCallbacks callbacks; } callbacks { { }, {
            0,
            nullptr,
            &getDecodeTime,
            &getPresentationTime,
            &getDuration,
            nullptr,
            &compareBuffers,
            nullptr,
            nullptr,
        } };
#pragma pack(pop)
        CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
        m_consumerQueue = adoptCF(outQueue);
    }

    ++m_framesBeingDecoded;

    LOG(Media, "WebCoreDecompressionSession::enqueueSample(%p) - framesBeingDecoded(%d)", this, m_framesBeingDecoded);

    dispatch_async(m_decompressionQueue.get(), [protectedThis = makeRefPtr(*this), strongBuffer = retainPtr(sampleBuffer), displaying] {
        protectedThis->decodeSample(strongBuffer.get(), displaying);
    });
}
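// A displayed sample whose presentation window has already passed can be skipped, but only
// if no other frame depends on it (kCMSampleAttachmentKey_IsDependedOnByOthers).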
bool WebCoreDecompressionSession::shouldDecodeSample(CMSampleBufferRef sample, bool displaying)
{
    if (!displaying)
        return true;

    if (!m_timebase)
        return true;

    auto currentTime = CMTimebaseGetTime(m_timebase.get());
    auto presentationStartTime = CMSampleBufferGetPresentationTimeStamp(sample);
    auto duration = CMSampleBufferGetDuration(sample);
    auto presentationEndTime = CMTimeAdd(presentationStartTime, duration);
    if (CMTimeCompare(presentationEndTime, currentTime) >= 0)
        return true;

    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    for (CFIndex index = 0, count = CFArrayGetCount(attachments); index < count; ++index) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, index);
        CFBooleanRef dependedOn = (CFBooleanRef)CFDictionaryGetValue(attachmentDict, kCMSampleAttachmentKey_IsDependedOnByOthers);
        if (dependedOn && !CFBooleanGetValue(dependedOn))
            return false;
    }

    return true;
}
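// Creates the VTDecompressionSession on demand, and recreates it when the incoming sample's
// format description is no longer acceptable. Also caches the decoder's suggested
// quality-of-service tiers for use by the QoS heuristics below.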
void WebCoreDecompressionSession::ensureDecompressionSessionForSample(CMSampleBufferRef sample)
{
    if (isInvalidated())
        return;

    CMVideoFormatDescriptionRef videoFormatDescription = CMSampleBufferGetFormatDescription(sample);
    if (m_decompressionSession && !VTDecompressionSessionCanAcceptFormatDescription(m_decompressionSession.get(), videoFormatDescription)) {
        VTDecompressionSessionWaitForAsynchronousFrames(m_decompressionSession.get());
        m_decompressionSession = nullptr;
    }

    if (!m_decompressionSession) {
        NSDictionary* videoDecoderSpecification = @{ (NSString *)kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder: @YES };

        NSDictionary *attributes;
        if (m_mode == OpenGL) {
            attributes = nil;
        } else {
            ASSERT(m_mode == RGB);
            attributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
        }
        VTDecompressionSessionRef decompressionSessionOut = nullptr;
        if (noErr == VTDecompressionSessionCreate(kCFAllocatorDefault, videoFormatDescription, (CFDictionaryRef)videoDecoderSpecification, (CFDictionaryRef)attributes, nullptr, &decompressionSessionOut)) {
            m_decompressionSession = adoptCF(decompressionSessionOut);
            CFArrayRef rawSuggestedQualityOfServiceTiers = nullptr;
            VTSessionCopyProperty(decompressionSessionOut, kVTDecompressionPropertyKey_SuggestedQualityOfServiceTiers, kCFAllocatorDefault, &rawSuggestedQualityOfServiceTiers);
            m_qosTiers = adoptCF(rawSuggestedQualityOfServiceTiers);
            m_currentQosTier = 0;
            resetQosTier();
        }
    }
}
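// Decodes a sample on the decompression queue. Late frames that nothing depends on are
// counted as dropped instead of being decoded; per-frame decode times feed the QoS heuristics.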
void WebCoreDecompressionSession::decodeSample(CMSampleBufferRef sample, bool displaying)
{
    if (isInvalidated())
        return;

    ensureDecompressionSessionForSample(sample);

    VTDecodeInfoFlags flags { kVTDecodeFrame_EnableTemporalProcessing };
    if (!displaying)
        flags |= kVTDecodeFrame_DoNotOutputFrame;

    if (!shouldDecodeSample(sample, displaying)) {
        ++m_totalVideoFrames;
        ++m_droppedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    MonotonicTime startTime = MonotonicTime::now();
    VTDecompressionSessionDecodeFrameWithOutputHandler(m_decompressionSession.get(), sample, flags, nullptr, [this, displaying, startTime](OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
        double deltaRatio = (MonotonicTime::now() - startTime).seconds() / CMTimeGetSeconds(presentationDuration);

        updateQosWithDecodeTimeStatistics(deltaRatio);
        handleDecompressionOutput(displaying, status, infoFlags, imageBuffer, presentationTimeStamp, presentationDuration);
    });
}
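// Synchronous variant: no asynchronous-decompression flag is passed, so the output handler
// runs before the decode call returns and the decoded pixel buffer can be returned directly.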
RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::decodeSampleSync(CMSampleBufferRef sample)
{
    if (isInvalidated())
        return nullptr;

    ensureDecompressionSessionForSample(sample);

    RetainPtr<CVPixelBufferRef> pixelBuffer;
    VTDecodeInfoFlags flags { 0 };
    VTDecompressionSessionDecodeFrameWithOutputHandler(m_decompressionSession.get(), sample, flags, nullptr, [&] (OSStatus, VTDecodeInfoFlags, CVImageBufferRef imageBuffer, CMTime, CMTime) mutable {
        if (imageBuffer && CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID())
            pixelBuffer = (CVPixelBufferRef)imageBuffer;
    });
    return pixelBuffer;
}
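// Decoder completion handler: wraps the decoded image buffer in a CMSampleBuffer carrying its
// presentation timing and forwards it to the enqueueing queue. Decode failures are counted as
// corrupted frames.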
void WebCoreDecompressionSession::handleDecompressionOutput(bool displaying, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef rawImageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration)
{
    ++m_totalVideoFrames;
    if (infoFlags & kVTDecodeInfo_FrameDropped)
        ++m_droppedVideoFrames;

    CMVideoFormatDescriptionRef rawImageBufferDescription = nullptr;
    if (status != noErr || noErr != CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rawImageBuffer, &rawImageBufferDescription)) {
        ++m_corruptedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }
    RetainPtr<CMVideoFormatDescriptionRef> imageBufferDescription = adoptCF(rawImageBufferDescription);

    CMSampleTimingInfo imageBufferTiming {
        presentationDuration,
        presentationTimeStamp,
        presentationTimeStamp,
    };

    CMSampleBufferRef rawImageSampleBuffer = nullptr;
    if (noErr != CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, rawImageBuffer, imageBufferDescription.get(), &imageBufferTiming, &rawImageSampleBuffer)) {
        ++m_corruptedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    dispatch_async(m_enqueingQueue.get(), [protectedThis = makeRefPtr(this), imageSampleBuffer = adoptCF(rawImageSampleBuffer), displaying] {
        protectedThis->enqueueDecodedSample(imageSampleBuffer.get(), displaying);
    });
}

RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::getFirstVideoFrame()
{
    if (!m_producerQueue || CMBufferQueueIsEmpty(m_producerQueue.get()))
        return nullptr;

    RetainPtr<CMSampleBufferRef> currentSample = adoptCF(checked_cf_cast<CMSampleBufferRef>(CMBufferQueueDequeueAndRetain(m_producerQueue.get())));
    RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
    ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());

    maybeBecomeReadyForMoreMediaData();

    return imageBuffer;
}
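// Timer callback driven by the timebase: drops producer-queue samples whose presentation window
// has already passed, then reschedules the timer for the end of the frame now at the head of
// the queue.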
void WebCoreDecompressionSession::automaticDequeue()
{
    if (!m_timebase)
        return;

    auto time = PAL::toMediaTime(CMTimebaseGetTime(m_timebase.get()));
    LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - purging all samples before time(%s)", this, toString(time).utf8().data());

    MediaTime nextFireTime = MediaTime::positiveInfiniteTime();
    bool releasedImageBuffers = false;

    while (CMSampleBufferRef firstSample = checked_cf_cast<CMSampleBufferRef>(CMBufferQueueGetHead(m_producerQueue.get()))) {
        MediaTime presentationTimestamp = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
        MediaTime duration = PAL::toMediaTime(CMSampleBufferGetDuration(firstSample));
        MediaTime presentationEndTimestamp = presentationTimestamp + duration;
        if (time > presentationEndTimestamp) {
            CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
            releasedImageBuffers = true;
            continue;
        }

#if !LOG_DISABLED
        auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
        auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
        LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue(%s -> %s)", this, toString(begin).utf8().data(), toString(end).utf8().data());
#endif

        nextFireTime = presentationEndTimestamp;
        break;
    }

    if (releasedImageBuffers)
        maybeBecomeReadyForMoreMediaData();

    LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue empty", this);
    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), PAL::toCMTime(nextFireTime), 0);
}
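// Moves a decoded sample into the producer queue. During forward playback, frames that are
// already past their presentation window are dropped; otherwise the sample is enqueued, the
// dequeue timer is poked, and a pending notifyWhenHasAvailableVideoFrame() callback is
// dispatched to the main thread when the frame covers the current time.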
void WebCoreDecompressionSession::enqueueDecodedSample(CMSampleBufferRef sample, bool displaying)
{
    if (isInvalidated())
        return;

    --m_framesBeingDecoded;

    if (!displaying) {
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    bool shouldNotify = true;

    if (displaying && m_timebase) {
        auto currentRate = CMTimebaseGetRate(m_timebase.get());
        auto currentTime = PAL::toMediaTime(CMTimebaseGetTime(m_timebase.get()));
        auto presentationStartTime = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sample));
        auto presentationEndTime = presentationStartTime + PAL::toMediaTime(CMSampleBufferGetDuration(sample));
        if (currentTime < presentationStartTime || currentTime >= presentationEndTime)
            shouldNotify = false;

        if (currentRate > 0 && presentationEndTime < currentTime) {
#if !LOG_DISABLED
            auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
            auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
            LOG(Media, "WebCoreDecompressionSession::enqueueDecodedSample(%p) - dropping frame late by %s, framesBeingDecoded(%d), producerQueue(%s -> %s)", this, toString(presentationEndTime - currentTime).utf8().data(), m_framesBeingDecoded, toString(begin).utf8().data(), toString(end).utf8().data());
#endif
            ++m_droppedVideoFrames;
            return;
        }
    }

    CMBufferQueueEnqueue(m_producerQueue.get(), sample);

#if !LOG_DISABLED
    auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
    auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
    auto presentationTime = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sample));
    LOG(Media, "WebCoreDecompressionSession::enqueueDecodedSample(%p) - presentationTime(%s), framesBeingDecoded(%d), producerQueue(%s -> %s)", this, toString(presentationTime).utf8().data(), m_framesBeingDecoded, toString(begin).utf8().data(), toString(end).utf8().data());
#endif

    if (m_timebase)
        CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

    if (!m_hasAvailableFrameCallback)
        return;

    if (!shouldNotify)
        return;

    dispatch_async(dispatch_get_main_queue(), [protectedThis = makeRefPtr(this), callback = WTFMove(m_hasAvailableFrameCallback)] {
        callback();
    });
}
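// Back-pressure check: the client may enqueue more samples while the number of frames in
// flight plus the number of decoded frames awaiting display stays at or below kHighWaterMark.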
bool WebCoreDecompressionSession::isReadyForMoreMediaData() const
{
    CMItemCount producerCount = m_producerQueue ? CMBufferQueueGetBufferCount(m_producerQueue.get()) : 0;
    return m_framesBeingDecoded + producerCount <= kHighWaterMark;
}

void WebCoreDecompressionSession::requestMediaDataWhenReady(std::function<void()> notificationCallback)
{
    LOG(Media, "WebCoreDecompressionSession::requestMediaDataWhenReady(%p), hasNotificationCallback(%d)", this, !!notificationCallback);
    m_notificationCallback = notificationCallback;

    if (notificationCallback && isReadyForMoreMediaData()) {
        RefPtr<WebCoreDecompressionSession> protectedThis { this };
        dispatch_async(dispatch_get_main_queue(), [protectedThis] {
            if (protectedThis->m_notificationCallback)
                protectedThis->m_notificationCallback();
        });
    }
}

void WebCoreDecompressionSession::stopRequestingMediaData()
{
    LOG(Media, "WebCoreDecompressionSession::stopRequestingMediaData(%p)", this);
    m_notificationCallback = nullptr;
}

void WebCoreDecompressionSession::notifyWhenHasAvailableVideoFrame(std::function<void()> callback)
{
    if (callback && m_producerQueue && !CMBufferQueueIsEmpty(m_producerQueue.get())) {
        dispatch_async(dispatch_get_main_queue(), [callback] {
            callback();
        });
        return;
    }
    m_hasAvailableFrameCallback = callback;
}
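// Returns the decoded pixel buffer whose presentation interval contains the given time (or an
// earlier/later one when AllowEarlier/AllowLater is passed), dropping any stale frames it skips
// over on the way.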
RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const MediaTime& time, ImageForTimeFlags flags)
{
    if (CMBufferQueueIsEmpty(m_producerQueue.get())) {
        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - time(%s), queue empty", this, toString(time).utf8().data());
        return nullptr;
    }

    bool allowEarlier = flags == WebCoreDecompressionSession::AllowEarlier;
    bool allowLater = flags == WebCoreDecompressionSession::AllowLater;

    MediaTime startTime = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
    MediaTime endTime = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
    if (!allowLater && time < startTime) {
        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - time(%s) too early for queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
        return nullptr;
    }

    bool releasedImageBuffers = false;

    while (CMSampleBufferRef firstSample = checked_cf_cast<CMSampleBufferRef>(CMBufferQueueGetHead(m_producerQueue.get()))) {
        MediaTime presentationTimestamp = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
        MediaTime duration = PAL::toMediaTime(CMSampleBufferGetDuration(firstSample));
        MediaTime presentationEndTimestamp = presentationTimestamp + duration;
        if (!allowLater && presentationTimestamp > time)
            return nullptr;
        if (!allowEarlier && presentationEndTimestamp < time) {
            CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
            releasedImageBuffers = true;
            continue;
        }

        RetainPtr<CMSampleBufferRef> currentSample = adoptCF(checked_cf_cast<CMSampleBufferRef>(CMBufferQueueDequeueAndRetain(m_producerQueue.get())));
        RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
        ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());

        if (m_timebase)
            CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

        maybeBecomeReadyForMoreMediaData();

        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - found sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
        return imageBuffer;
    }

    if (m_timebase)
        CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

    if (releasedImageBuffers)
        maybeBecomeReadyForMoreMediaData();

    LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - no matching sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
    return nullptr;
}

void WebCoreDecompressionSession::flush()
{
    dispatch_sync(m_decompressionQueue.get(), [protectedThis = RefPtr<WebCoreDecompressionSession>(this)] {
        CMBufferQueueReset(protectedThis->m_producerQueue.get());
        dispatch_sync(protectedThis->m_enqueingQueue.get(), [protectedThis] {
            CMBufferQueueReset(protectedThis->m_consumerQueue.get());
            protectedThis->m_framesSinceLastQosCheck = 0;
            protectedThis->m_currentQosTier = 0;
            protectedThis->resetQosTier();
        });
    });
}
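// CMBufferQueue callback adapters: expose the timing of the CMSampleBuffers stored in the
// producer and consumer queues.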
CMTime WebCoreDecompressionSession::getDecodeTime(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetDecodeTimeStamp(sample);
}

CMTime WebCoreDecompressionSession::getPresentationTime(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetPresentationTimeStamp(sample);
}

CMTime WebCoreDecompressionSession::getDuration(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetDuration(sample);
}

CFComparisonResult WebCoreDecompressionSession::compareBuffers(CMBufferRef buf1, CMBufferRef buf2, void* refcon)
{
    return (CFComparisonResult)CMTimeCompare(getPresentationTime(buf1, refcon), getPresentationTime(buf2, refcon));
}

void WebCoreDecompressionSession::resetQosTier()
{
    if (!m_qosTiers || !m_decompressionSession)
        return;

    if (m_currentQosTier < 0 || m_currentQosTier >= CFArrayGetCount(m_qosTiers.get()))
        return;

    auto tier = (CFDictionaryRef)CFArrayGetValueAtIndex(m_qosTiers.get(), m_currentQosTier);
    LOG(Media, "WebCoreDecompressionSession::resetQosTier(%p) - currentQosTier(%ld), tier(%@)", this, m_currentQosTier, [(NSDictionary *)tier description]);

    VTSessionSetProperties(m_decompressionSession.get(), tier);
    m_framesSinceLastQosCheck = 0;
}

void WebCoreDecompressionSession::increaseQosTier()
{
    if (!m_qosTiers)
        return;

    if (m_currentQosTier + 1 >= CFArrayGetCount(m_qosTiers.get()))
        return;

    ++m_currentQosTier;
    resetQosTier();
}

void WebCoreDecompressionSession::decreaseQosTier()
{
    if (!m_qosTiers)
        return;

    if (m_currentQosTier <= 0)
        return;

    --m_currentQosTier;
    resetQosTier();
}
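// Maintains a rate-scaled exponential moving average of decode time relative to frame duration.
// Every kNumberOfFramesBeforeSwitchingTiers frames, it advances to the next suggested QoS tier
// when the average exceeds kHighWaterDecodeRatio and steps back toward the first tier when it
// falls below kLowWaterDecodeRatio.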
void WebCoreDecompressionSession::updateQosWithDecodeTimeStatistics(double ratio)
{
    static const double kMovingAverageAlphaValue = 0.1;
    static const unsigned kNumberOfFramesBeforeSwitchingTiers = 60;
    static const double kHighWaterDecodeRatio = 1.;
    static const double kLowWaterDecodeRatio = 0.5;

    if (!m_timebase)
        return;

    double rate = CMTimebaseGetRate(m_timebase.get());
    if (!rate)
        rate = 1;

    m_decodeRatioMovingAverage += kMovingAverageAlphaValue * (ratio - m_decodeRatioMovingAverage) * rate;
    if (++m_framesSinceLastQosCheck < kNumberOfFramesBeforeSwitchingTiers)
        return;

    LOG(Media, "WebCoreDecompressionSession::updateQosWithDecodeTimeStatistics(%p) - framesSinceLastQosCheck(%ld), decodeRatioMovingAverage(%g)", this, m_framesSinceLastQosCheck, m_decodeRatioMovingAverage);
    if (m_decodeRatioMovingAverage > kHighWaterDecodeRatio)
        increaseQosTier();
    else if (m_decodeRatioMovingAverage < kLowWaterDecodeRatio)
        decreaseQosTier();
    m_framesSinceLastQosCheck = 0;
}

}

#endif