/*
 * Copyright (C) 2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "WebCoreDecompressionSession.h"

#if USE(VIDEOTOOLBOX)

#import "Logging.h"
#import "PixelBufferConformerCV.h"
#import <CoreMedia/CMBufferQueue.h>
#import <CoreMedia/CMFormatDescription.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <wtf/MainThread.h>
#import <wtf/MediaTime.h>
#import <wtf/StringPrintStream.h>
#import <wtf/Vector.h>
#import <wtf/cf/TypeCastsCF.h>

#import <pal/cf/CoreMediaSoftLink.h>
#import "CoreVideoSoftLink.h"
#import "VideoToolboxSoftLink.h"

using namespace PAL;

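// Declares the CoreFoundation type trait that lets checked_cf_cast<CMSampleBufferRef>() be used
// on the CMBufferRefs pulled back out of the CMBufferQueues below.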
WTF_DECLARE_CF_TYPE_TRAIT(CMSampleBuffer);

namespace WebCore {

WebCoreDecompressionSession::WebCoreDecompressionSession(Mode mode)
    : m_mode(mode)
    , m_decompressionQueue(adoptOSObject(dispatch_queue_create("WebCoreDecompressionSession Decompression Queue", DISPATCH_QUEUE_SERIAL)))
    , m_enqueingQueue(adoptOSObject(dispatch_queue_create("WebCoreDecompressionSession Enqueueing Queue", DISPATCH_QUEUE_SERIAL)))
    , m_hasAvailableImageSemaphore(adoptOSObject(dispatch_semaphore_create(0)))
{
}

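// Invalidation clears both client callbacks, detaches the timebase, and cancels the dequeue
// timer so that no further decode or enqueue work is scheduled for this session.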
void WebCoreDecompressionSession::invalidate()
{
    m_invalidated = true;
    m_notificationCallback = nullptr;
    m_hasAvailableFrameCallback = nullptr;
    setTimebase(nullptr);
    if (m_timerSource)
        dispatch_source_cancel(m_timerSource.get());
}

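// Associates the session with the caller's timebase. A timer dispatch source on the main queue
// is registered with the timebase so automaticDequeue() runs as the playback clock advances.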
void WebCoreDecompressionSession::setTimebase(CMTimebaseRef timebase)
{
    if (m_timebase == timebase)
        return;

    if (m_timebase)
        CMTimebaseRemoveTimerDispatchSource(m_timebase.get(), m_timerSource.get());

    m_timebase = timebase;

    if (m_timebase) {
        if (!m_timerSource) {
            m_timerSource = adoptOSObject(dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue()));
            dispatch_source_set_event_handler(m_timerSource.get(), [this] {
                automaticDequeue();
            });
            dispatch_activate(m_timerSource.get());
        }
        CMTimebaseAddTimerDispatchSource(m_timebase.get(), m_timerSource.get());
    }
}

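// Notifies the client, always on the main thread, once the number of in-flight and enqueued
// frames is back at or below the high-water mark.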
void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData()
{
    if (!isReadyForMoreMediaData() || !m_notificationCallback)
        return;

    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);

    if (isMainThread()) {
        m_notificationCallback();
        return;
    }

    RefPtr<WebCoreDecompressionSession> protectedThis { this };
    dispatch_async(dispatch_get_main_queue(), [protectedThis] {
        if (protectedThis->m_notificationCallback)
            protectedThis->m_notificationCallback();
    });
}

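// Entry point for compressed samples: validates the sample's timing information, lazily creates
// the producer/consumer buffer queues, and dispatches the decode to the decompression queue.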
void WebCoreDecompressionSession::enqueueSample(CMSampleBufferRef sampleBuffer, bool displaying)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, itemCount, timingInfoArray.data(), nullptr))
        return;

    if (!m_decompressionQueue)
        m_decompressionQueue = adoptOSObject(dispatch_queue_create("SourceBufferPrivateAVFObjC Decompression Queue", DISPATCH_QUEUE_SERIAL));

    // CMBufferCallbacks contains 64-bit pointers that aren't 8-byte aligned. To suppress the linker
    // warning about this, we prepend 4 bytes of padding when building for macOS.
#if PLATFORM(MAC)
    const size_t padSize = 4;
#else
    const size_t padSize = 0;
#endif

    if (!m_producerQueue) {
        CMBufferQueueRef outQueue { nullptr };
#pragma pack(push, 4)
        struct { uint8_t pad[padSize]; CMBufferCallbacks callbacks; } callbacks { { }, {
            0,
            nullptr,
            &getDecodeTime,
            &getPresentationTime,
            &getDuration,
            nullptr,
            &compareBuffers,
            nullptr,
            nullptr,
        } };
#pragma pack(pop)
        CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
        m_producerQueue = adoptCF(outQueue);
    }

    if (!m_consumerQueue) {
        CMBufferQueueRef outQueue { nullptr };
#pragma pack(push, 4)
        struct { uint8_t pad[padSize]; CMBufferCallbacks callbacks; } callbacks { { }, {
            0,
            nullptr,
            &getDecodeTime,
            &getPresentationTime,
            &getDuration,
            nullptr,
            &compareBuffers,
            nullptr,
            nullptr,
        } };
#pragma pack(pop)
        CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
        m_consumerQueue = adoptCF(outQueue);
    }

    ++m_framesBeingDecoded;

    LOG(Media, "WebCoreDecompressionSession::enqueueSample(%p) - framesBeingDecoded(%d)", this, m_framesBeingDecoded);

    dispatch_async(m_decompressionQueue.get(), [protectedThis = makeRefPtr(*this), strongBuffer = retainPtr(sampleBuffer), displaying] {
        protectedThis->decodeSample(strongBuffer.get(), displaying);
    });
}

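// A sample being displayed may be skipped if its presentation window has already passed, but only
// when no other frame depends on it (kCMSampleAttachmentKey_IsDependedOnByOthers).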
bool WebCoreDecompressionSession::shouldDecodeSample(CMSampleBufferRef sample, bool displaying)
{
    if (!displaying)
        return true;

    if (!m_timebase)
        return true;

    auto currentTime = CMTimebaseGetTime(m_timebase.get());
    auto presentationStartTime = CMSampleBufferGetPresentationTimeStamp(sample);
    auto duration = CMSampleBufferGetDuration(sample);
    auto presentationEndTime = CMTimeAdd(presentationStartTime, duration);
    if (CMTimeCompare(presentationEndTime, currentTime) >= 0)
        return true;

    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    for (CFIndex index = 0, count = CFArrayGetCount(attachments); index < count; ++index) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, index);
        CFBooleanRef dependedOn = (CFBooleanRef)CFDictionaryGetValue(attachmentDict, kCMSampleAttachmentKey_IsDependedOnByOthers);
        if (dependedOn && !CFBooleanGetValue(dependedOn))
            return false;
    }

    return true;
}

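// Creates the VTDecompressionSession on first use and recreates it when the incoming format
// description is no longer acceptable. The decoder's suggested quality-of-service tiers are
// cached here for use by updateQosWithDecodeTimeStatistics().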
void WebCoreDecompressionSession::ensureDecompressionSessionForSample(CMSampleBufferRef sample)
{
    if (isInvalidated())
        return;

    CMVideoFormatDescriptionRef videoFormatDescription = CMSampleBufferGetFormatDescription(sample);
    if (m_decompressionSession && !VTDecompressionSessionCanAcceptFormatDescription(m_decompressionSession.get(), videoFormatDescription)) {
        VTDecompressionSessionWaitForAsynchronousFrames(m_decompressionSession.get());
        m_decompressionSession = nullptr;
    }

    if (!m_decompressionSession) {
        NSDictionary *videoDecoderSpecification = @{ (NSString *)kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder: @YES };

        NSDictionary *attributes;
        if (m_mode == OpenGL) {
            attributes = nil;
        } else {
            ASSERT(m_mode == RGB);
            attributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
        }
        VTDecompressionSessionRef decompressionSessionOut = nullptr;
        if (noErr == VTDecompressionSessionCreate(kCFAllocatorDefault, videoFormatDescription, (CFDictionaryRef)videoDecoderSpecification, (CFDictionaryRef)attributes, nullptr, &decompressionSessionOut)) {
            m_decompressionSession = adoptCF(decompressionSessionOut);
            CFArrayRef rawSuggestedQualityOfServiceTiers = nullptr;
            VTSessionCopyProperty(decompressionSessionOut, kVTDecompressionPropertyKey_SuggestedQualityOfServiceTiers, kCFAllocatorDefault, &rawSuggestedQualityOfServiceTiers);
            m_qosTiers = adoptCF(rawSuggestedQualityOfServiceTiers);
            m_currentQosTier = 0;
            resetQosTier();
        }
    }
}

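// Decodes a single sample. Samples rejected by shouldDecodeSample() are counted as dropped; for
// the rest, the decode time relative to the frame duration feeds the QoS statistics before the
// output is handled.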
void WebCoreDecompressionSession::decodeSample(CMSampleBufferRef sample, bool displaying)
{
    if (isInvalidated())
        return;

    ensureDecompressionSessionForSample(sample);

    VTDecodeInfoFlags flags { kVTDecodeFrame_EnableTemporalProcessing };
    if (!displaying)
        flags |= kVTDecodeFrame_DoNotOutputFrame;

    if (!shouldDecodeSample(sample, displaying)) {
        ++m_totalVideoFrames;
        ++m_droppedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    MonotonicTime startTime = MonotonicTime::now();
    VTDecompressionSessionDecodeFrameWithOutputHandler(m_decompressionSession.get(), sample, flags, nullptr, [this, displaying, startTime](OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
        double deltaRatio = (MonotonicTime::now() - startTime).seconds() / CMTimeGetSeconds(presentationDuration);

        updateQosWithDecodeTimeStatistics(deltaRatio);
        handleDecompressionOutput(displaying, status, infoFlags, imageBuffer, presentationTimeStamp, presentationDuration);
    });
}

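// Synchronous variant: the decode is not flagged as asynchronous, so the output handler is
// expected to run before VTDecompressionSessionDecodeFrameWithOutputHandler() returns, allowing
// the pixel buffer to be captured by reference.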
RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::decodeSampleSync(CMSampleBufferRef sample)
{
    if (isInvalidated())
        return nullptr;

    ensureDecompressionSessionForSample(sample);

    RetainPtr<CVPixelBufferRef> pixelBuffer;
    VTDecodeInfoFlags flags { 0 };
    VTDecompressionSessionDecodeFrameWithOutputHandler(m_decompressionSession.get(), sample, flags, nullptr, [&] (OSStatus, VTDecodeInfoFlags, CVImageBufferRef imageBuffer, CMTime, CMTime) mutable {
        if (imageBuffer && CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID())
            pixelBuffer = (CVPixelBufferRef)imageBuffer;
    });
    return pixelBuffer;
}

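// Called once per decoded frame. Wraps the decoded image buffer in a CMSampleBuffer carrying the
// original presentation timing and hands it to the enqueueing queue; failures are counted as
// corrupted frames.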
void WebCoreDecompressionSession::handleDecompressionOutput(bool displaying, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef rawImageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration)
{
    ++m_totalVideoFrames;
    if (infoFlags & kVTDecodeInfo_FrameDropped)
        ++m_droppedVideoFrames;

    CMVideoFormatDescriptionRef rawImageBufferDescription = nullptr;
    if (status != noErr || noErr != CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rawImageBuffer, &rawImageBufferDescription)) {
        ++m_corruptedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }
    RetainPtr<CMVideoFormatDescriptionRef> imageBufferDescription = adoptCF(rawImageBufferDescription);

    CMSampleTimingInfo imageBufferTiming {
        presentationDuration,
        presentationTimeStamp,
        presentationTimeStamp,
    };

    CMSampleBufferRef rawImageSampleBuffer = nullptr;
    if (noErr != CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, rawImageBuffer, imageBufferDescription.get(), &imageBufferTiming, &rawImageSampleBuffer)) {
        ++m_corruptedVideoFrames;
        --m_framesBeingDecoded;
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    dispatch_async(m_enqueingQueue.get(), [protectedThis = makeRefPtr(this), imageSampleBuffer = adoptCF(rawImageSampleBuffer), displaying] {
        protectedThis->enqueueDecodedSample(imageSampleBuffer.get(), displaying);
    });
}

RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::getFirstVideoFrame()
{
    if (!m_producerQueue || CMBufferQueueIsEmpty(m_producerQueue.get()))
        return nullptr;

    RetainPtr<CMSampleBufferRef> currentSample = adoptCF(checked_cf_cast<CMSampleBufferRef>(CMBufferQueueDequeueAndRetain(m_producerQueue.get())));
    RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
    ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());

    maybeBecomeReadyForMoreMediaData();

    return imageBuffer;
}

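// Driven by the timebase timer: drops every enqueued sample whose presentation window has already
// passed, then re-arms the timer for the end of the frame now at the head of the queue.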
void WebCoreDecompressionSession::automaticDequeue()
{
    if (!m_timebase)
        return;

    auto time = PAL::toMediaTime(CMTimebaseGetTime(m_timebase.get()));
    LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - purging all samples before time(%s)", this, toString(time).utf8().data());

    MediaTime nextFireTime = MediaTime::positiveInfiniteTime();
    bool releasedImageBuffers = false;

    while (CMSampleBufferRef firstSample = checked_cf_cast<CMSampleBufferRef>(CMBufferQueueGetHead(m_producerQueue.get()))) {
        MediaTime presentationTimestamp = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
        MediaTime duration = PAL::toMediaTime(CMSampleBufferGetDuration(firstSample));
        MediaTime presentationEndTimestamp = presentationTimestamp + duration;
        if (time > presentationEndTimestamp) {
            CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
            releasedImageBuffers = true;
            continue;
        }

#if !LOG_DISABLED
        auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
        auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
        LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue(%s -> %s)", this, toString(begin).utf8().data(), toString(end).utf8().data());
#endif

        nextFireTime = presentationEndTimestamp;
        break;
    }

    if (releasedImageBuffers)
        maybeBecomeReadyForMoreMediaData();

    LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue empty", this);
    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), PAL::toCMTime(nextFireTime), 0);
}

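// Moves a decoded sample into the producer queue. While playback is progressing, frames that are
// already past their presentation window are dropped as late; otherwise the has-available-frame
// callback is scheduled when the new frame covers the current timebase time.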
void WebCoreDecompressionSession::enqueueDecodedSample(CMSampleBufferRef sample, bool displaying)
{
    if (isInvalidated())
        return;

    --m_framesBeingDecoded;

    if (!displaying) {
        maybeBecomeReadyForMoreMediaData();
        return;
    }

    bool shouldNotify = true;

    if (displaying && m_timebase) {
        auto currentRate = CMTimebaseGetRate(m_timebase.get());
        auto currentTime = PAL::toMediaTime(CMTimebaseGetTime(m_timebase.get()));
        auto presentationStartTime = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sample));
        auto presentationEndTime = presentationStartTime + PAL::toMediaTime(CMSampleBufferGetDuration(sample));
        if (currentTime < presentationStartTime || currentTime >= presentationEndTime)
            shouldNotify = false;

        if (currentRate > 0 && presentationEndTime < currentTime) {
#if !LOG_DISABLED
            auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
            auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
            LOG(Media, "WebCoreDecompressionSession::enqueueDecodedSample(%p) - dropping frame late by %s, framesBeingDecoded(%d), producerQueue(%s -> %s)", this, toString(presentationEndTime - currentTime).utf8().data(), m_framesBeingDecoded, toString(begin).utf8().data(), toString(end).utf8().data());
#endif
            ++m_droppedVideoFrames;
            return;
        }
    }

    CMBufferQueueEnqueue(m_producerQueue.get(), sample);

#if !LOG_DISABLED
    auto begin = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
    auto end = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
    auto presentationTime = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sample));
    LOG(Media, "WebCoreDecompressionSession::enqueueDecodedSample(%p) - presentationTime(%s), framesBeingDecoded(%d), producerQueue(%s -> %s)", this, toString(presentationTime).utf8().data(), m_framesBeingDecoded, toString(begin).utf8().data(), toString(end).utf8().data());
#endif

    if (m_timebase)
        CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

    if (!m_hasAvailableFrameCallback)
        return;

    if (!shouldNotify)
        return;

    dispatch_async(dispatch_get_main_queue(), [protectedThis = makeRefPtr(this), callback = WTFMove(m_hasAvailableFrameCallback)] {
        callback();
    });
}

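// The session accepts more media while the frames still being decoded plus the frames waiting in
// the producer queue do not exceed kHighWaterMark.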
bool WebCoreDecompressionSession::isReadyForMoreMediaData() const
{
    CMItemCount producerCount = m_producerQueue ? CMBufferQueueGetBufferCount(m_producerQueue.get()) : 0;
    return m_framesBeingDecoded + producerCount <= kHighWaterMark;
}

void WebCoreDecompressionSession::requestMediaDataWhenReady(std::function<void()> notificationCallback)
{
    LOG(Media, "WebCoreDecompressionSession::requestMediaDataWhenReady(%p), hasNotificationCallback(%d)", this, !!notificationCallback);
    m_notificationCallback = notificationCallback;

    if (notificationCallback && isReadyForMoreMediaData()) {
        RefPtr<WebCoreDecompressionSession> protectedThis { this };
        dispatch_async(dispatch_get_main_queue(), [protectedThis] {
            if (protectedThis->m_notificationCallback)
                protectedThis->m_notificationCallback();
        });
    }
}

void WebCoreDecompressionSession::stopRequestingMediaData()
{
    LOG(Media, "WebCoreDecompressionSession::stopRequestingMediaData(%p)", this);
    m_notificationCallback = nullptr;
}

void WebCoreDecompressionSession::notifyWhenHasAvailableVideoFrame(std::function<void()> callback)
{
    if (callback && m_producerQueue && !CMBufferQueueIsEmpty(m_producerQueue.get())) {
        dispatch_async(dispatch_get_main_queue(), [callback] {
            callback();
        });
        return;
    }
    m_hasAvailableFrameCallback = callback;
}

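// Dequeues samples until one whose presentation interval contains the requested time is found.
// AllowEarlier and AllowLater relax the containment check in the corresponding direction; frames
// that are too old are released so the client can enqueue more data.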
RetainPtr<CVPixelBufferRef> WebCoreDecompressionSession::imageForTime(const MediaTime& time, ImageForTimeFlags flags)
{
    if (CMBufferQueueIsEmpty(m_producerQueue.get())) {
        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - time(%s), queue empty", this, toString(time).utf8().data());
        return nullptr;
    }

    bool allowEarlier = flags == WebCoreDecompressionSession::AllowEarlier;
    bool allowLater = flags == WebCoreDecompressionSession::AllowLater;

    MediaTime startTime = PAL::toMediaTime(CMBufferQueueGetFirstPresentationTimeStamp(m_producerQueue.get()));
    MediaTime endTime = PAL::toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
    if (!allowLater && time < startTime) {
        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - time(%s) too early for queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
        return nullptr;
    }

    bool releasedImageBuffers = false;

    while (CMSampleBufferRef firstSample = checked_cf_cast<CMSampleBufferRef>(CMBufferQueueGetHead(m_producerQueue.get()))) {
        MediaTime presentationTimestamp = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
        MediaTime duration = PAL::toMediaTime(CMSampleBufferGetDuration(firstSample));
        MediaTime presentationEndTimestamp = presentationTimestamp + duration;
        if (!allowLater && presentationTimestamp > time)
            return nullptr;
        if (!allowEarlier && presentationEndTimestamp < time) {
            CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
            releasedImageBuffers = true;
            continue;
        }

        RetainPtr<CMSampleBufferRef> currentSample = adoptCF(checked_cf_cast<CMSampleBufferRef>(CMBufferQueueDequeueAndRetain(m_producerQueue.get())));
        RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
        ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());

        if (m_timebase)
            CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

        maybeBecomeReadyForMoreMediaData();

        LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - found sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
        return imageBuffer;
    }

    if (m_timebase)
        CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());

    if (releasedImageBuffers)
        maybeBecomeReadyForMoreMediaData();

    LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - no matching sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
    return nullptr;
}

void WebCoreDecompressionSession::flush()
{
    dispatch_sync(m_decompressionQueue.get(), [protectedThis = RefPtr<WebCoreDecompressionSession>(this)] {
        CMBufferQueueReset(protectedThis->m_producerQueue.get());
        dispatch_sync(protectedThis->m_enqueingQueue.get(), [protectedThis] {
            CMBufferQueueReset(protectedThis->m_consumerQueue.get());
            protectedThis->m_framesSinceLastQosCheck = 0;
            protectedThis->m_currentQosTier = 0;
            protectedThis->resetQosTier();
        });
    });
}

CMTime WebCoreDecompressionSession::getDecodeTime(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetDecodeTimeStamp(sample);
}

CMTime WebCoreDecompressionSession::getPresentationTime(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetPresentationTimeStamp(sample);
}

CMTime WebCoreDecompressionSession::getDuration(CMBufferRef buf, void*)
{
    CMSampleBufferRef sample = checked_cf_cast<CMSampleBufferRef>(buf);
    return CMSampleBufferGetDuration(sample);
}

CFComparisonResult WebCoreDecompressionSession::compareBuffers(CMBufferRef buf1, CMBufferRef buf2, void* refcon)
{
    return (CFComparisonResult)CMTimeCompare(getPresentationTime(buf1, refcon), getPresentationTime(buf2, refcon));
}

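// Applies the properties of the current entry in the decoder's suggested QoS tiers to the
// decompression session. increaseQosTier() and decreaseQosTier() move the tier index forward or
// back within the bounds of the tier array.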
void WebCoreDecompressionSession::resetQosTier()
{
    if (!m_qosTiers || !m_decompressionSession)
        return;

    if (m_currentQosTier < 0 || m_currentQosTier >= CFArrayGetCount(m_qosTiers.get()))
        return;

    auto tier = (CFDictionaryRef)CFArrayGetValueAtIndex(m_qosTiers.get(), m_currentQosTier);
    LOG(Media, "WebCoreDecompressionSession::resetQosTier(%p) - currentQosTier(%ld), tier(%@)", this, m_currentQosTier, [(NSDictionary *)tier description]);

    VTSessionSetProperties(m_decompressionSession.get(), tier);
    m_framesSinceLastQosCheck = 0;
}

void WebCoreDecompressionSession::increaseQosTier()
{
    if (!m_qosTiers)
        return;

    if (m_currentQosTier + 1 >= CFArrayGetCount(m_qosTiers.get()))
        return;

    ++m_currentQosTier;
    resetQosTier();
}

void WebCoreDecompressionSession::decreaseQosTier()
{
    if (!m_qosTiers)
        return;

    if (m_currentQosTier <= 0)
        return;

    --m_currentQosTier;
    resetQosTier();
}

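// Maintains an exponentially weighted moving average of the decode-time to frame-duration ratio,
// scaled by the playback rate. Every kNumberOfFramesBeforeSwitchingTiers frames, the average is
// compared against the high- and low-water ratios and the QoS tier is stepped accordingly.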
void WebCoreDecompressionSession::updateQosWithDecodeTimeStatistics(double ratio)
{
    static const double kMovingAverageAlphaValue = 0.1;
    static const unsigned kNumberOfFramesBeforeSwitchingTiers = 60;
    static const double kHighWaterDecodeRatio = 1.;
    static const double kLowWaterDecodeRatio = 0.5;

    if (!m_timebase)
        return;

    double rate = CMTimebaseGetRate(m_timebase.get());
    if (!rate)
        rate = 1;

    m_decodeRatioMovingAverage += kMovingAverageAlphaValue * (ratio - m_decodeRatioMovingAverage) * rate;
    if (++m_framesSinceLastQosCheck < kNumberOfFramesBeforeSwitchingTiers)
        return;

    LOG(Media, "WebCoreDecompressionSession::updateQosWithDecodeTimeStatistics(%p) - framesSinceLastQosCheck(%ld), decodeRatioMovingAverage(%g)", this, m_framesSinceLastQosCheck, m_decodeRatioMovingAverage);
    if (m_decodeRatioMovingAverage > kHighWaterDecodeRatio)
        increaseQosTier();
    else if (m_decodeRatioMovingAverage < kLowWaterDecodeRatio)
        decreaseQosTier();
    m_framesSinceLastQosCheck = 0;
}

}

#endif