Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp (WebKit.git @ d569022e7fe48948bf0f59b07ed889afb565815f)
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if PLATFORM(WIN) && ENABLE(VIDEO) 
29
30 #if USE(AVFOUNDATION)
31
32 #include "MediaPlayerPrivateAVFoundationCF.h"
33
34 #include "ApplicationCacheResource.h"
35 #include "CDMSessionAVFoundationCF.h"
36 #include "COMPtr.h"
37 #include "FloatConversion.h"
38 #include "GraphicsContext.h"
39 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
40 #include "InbandTextTrackPrivateAVCF.h"
41 #else
42 #include "InbandTextTrackPrivateLegacyAVCF.h"
43 #endif
44 #include "MediaTimeAVFoundation.h"
45 #include "URL.h"
46 #include "Logging.h"
47 #include "PlatformCALayerClient.h"
48 #include "PlatformCALayerWin.h"
49 #include "TimeRanges.h"
50 #include "WebCoreAVCFResourceLoader.h"
51
52 #include <AVFoundationCF/AVCFPlayerItem.h>
53 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
54 #include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
55 #endif
56 #include <AVFoundationCF/AVCFPlayerLayer.h>
57 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
58 #include <AVFoundationCF/AVCFAssetResourceLoader.h>
59 #endif
60 #include <AVFoundationCF/AVFoundationCF.h>
61 #include <d3d9.h>
62 #include <delayimp.h>
63 #include <dispatch/dispatch.h>
64 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
65 #include <runtime/DataView.h>
66 #include <runtime/Uint16Array.h>
67 #endif
68 #include <wtf/HashMap.h>
69 #include <wtf/NeverDestroyed.h>
70 #include <wtf/Threading.h>
71 #include <wtf/text/CString.h>
72 #include <wtf/text/StringView.h>
73 #include <wtf/StringPrintStream.h>
74
75 // Soft-linking headers must be included last since they #define functions, constants, etc.
76 #include "AVFoundationCFSoftLinking.h"
77 #include "CoreMediaSoftLink.h"
78
79 // We don't bother softlinking against libdispatch since it's already been loaded by AAS.
80 #ifdef DEBUG_ALL
81 #pragma comment(lib, "libdispatch_debug.lib")
82 #else
83 #pragma comment(lib, "libdispatch.lib")
84 #endif
85
86 using namespace std;
87
88 namespace WebCore {
89
90 class LayerClient;
91
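// AVFWrapper owns the AVCF-side objects (asset, player, player item, video layer and image generator) on
// behalf of MediaPlayerPrivateAVFoundationCF. AVCF delivers its callbacks asynchronously on a dispatch queue,
// so each wrapper registers itself in a global map keyed by a unique object ID; callbacks look the wrapper up
// by that ID and are ignored once the wrapper has been scheduled for disconnection and deletion.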
92 class AVFWrapper {
93 public:
94     AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
95     ~AVFWrapper();
96
97     void scheduleDisconnectAndDelete();
98
99     void createAVCFVideoLayer();
100     void destroyVideoLayer();
101     PlatformLayer* platformLayer();
102
103     CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
104     PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
105     void setVideoLayerNeedsCommit();
106     void setVideoLayerHidden(bool);
107
108     void createImageGenerator();
109     void destroyImageGenerator();
110     RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const FloatRect&);
111
112     void createAssetForURL(const String& url, bool inheritURI);
113     void setAsset(AVCFURLAssetRef);
114     
115     void createPlayer(IDirect3DDevice9*);
116     void createPlayerItem();
117     
118     void checkPlayability();
119     void beginLoadingMetadata();
120     
121     void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
122     void updateVideoLayerGravity();
123
124     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
125     InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }
126
127 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
128     static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
129     static void processCue(void* context);
130 #endif
131 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
132     static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
133 #endif
134     static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
135     static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
136     static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
137     static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
138     static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
139     static void processNotification(void* context);
140
141     inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
142     inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
143     inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
144     inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
145     inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
146     inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
147 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
148     inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
149     AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
150 #endif
151     inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }
152
153 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
154     RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
155     void setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest);
156 #endif
157
158 private:
159     inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }
160
161     static Lock& mapLock();
162     static HashMap<uintptr_t, AVFWrapper*>& map();
163     static AVFWrapper* avfWrapperForCallbackContext(void*);
164     void addToMap();
165     void removeFromMap() const;
166 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
167     bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
168     static void processShouldWaitForLoadingOfResource(void* context);
169 #endif
170
171     static void disconnectAndDeleteAVFWrapper(void*);
172
173     static uintptr_t s_nextAVFWrapperObjectID;
174     uintptr_t m_objectID;
175
176     MediaPlayerPrivateAVFoundationCF* m_owner;
177
178     RetainPtr<AVCFPlayerRef> m_avPlayer;
179     RetainPtr<AVCFURLAssetRef> m_avAsset;
180     RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
181     RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
182     RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
183     RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
184 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
185     RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
186     RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
187 #endif
188
189     dispatch_queue_t m_notificationQueue;
190
191     mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
192     RefPtr<PlatformCALayer> m_videoLayerWrapper;
193
194     std::unique_ptr<LayerClient> m_layerClient;
195     COMPtr<IDirect3DDevice9Ex> m_d3dDevice;
196
197     InbandTextTrackPrivateAVF* m_currentTextTrack;
198
199 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
200     HashMap<String, Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>>> m_keyURIToRequestMap;
201     AVCFAssetResourceLoaderCallbacks m_resourceLoaderCallbacks;
202 #endif
203 };
204
205 uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;
206
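// LayerClient hooks the CACFLayer wrapper up to WebCore's PlatformCALayer machinery. It only opts in to
// layout callbacks, so the AVCF video layer can be kept in sync with the bounds of its wrapper layer; the
// painting, debug border and repaint counter callbacks are stubbed out because the video layer draws its
// own content.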
207 class LayerClient : public PlatformCALayerClient {
208 public:
209     LayerClient(AVFWrapper* parent) : m_parent(parent) { }
210     virtual ~LayerClient() { m_parent = 0; }
211
212 private:
213     virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
214     virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }
215
216     virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
217     virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
218     virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&) { }
219     virtual bool platformCALayerShowDebugBorders() const { return false; }
220     virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
221     virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }
222
223     virtual bool platformCALayerContentsOpaque() const { return false; }
224     virtual bool platformCALayerDrawsContent() const { return false; }
225     virtual float platformCALayerDeviceScaleFactor() const { return 1; }
226
227     AVFWrapper* m_parent;
228 };
229
230 #if !LOG_DISABLED
231 static const char* boolString(bool val)
232 {
233     return val ? "true" : "false";
234 }
235 #endif
236
237 static CFArrayRef createMetadataKeyNames()
238 {
239     static const CFStringRef keyNames[] = {
240         AVCFAssetPropertyDuration,
241         AVCFAssetPropertyNaturalSize,
242         AVCFAssetPropertyPreferredTransform,
243         AVCFAssetPropertyPreferredRate,
244         AVCFAssetPropertyPlayable,
245         AVCFAssetPropertyTracks,
246 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
247         AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
248 #endif
249     };
250     
251     return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
252 }
253
254 static CFArrayRef metadataKeyNames()
255 {
256     static CFArrayRef keys = createMetadataKeyNames();
257     return keys;
258 }
259
260 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
261 static CFStringRef CMTimeRangeStartKey()
262 {
263     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start")));
264     return key;
265 }
266
267 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
268 static CFStringRef CMTimeRangeDurationKey()
269 {
270     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration")));
271     return key;
272 }
273
274 // FIXME: It would be better if AVCF exported this notification name.
275 static CFStringRef CACFContextNeedsFlushNotification()
276 {
277     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification")));
278     return name;
279 }
280
281 // Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
282 // to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
283 inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
284 {
285     return wrapper ? wrapper->videoLayer() : 0; 
286 }
287
288 inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
289 {
290     return wrapper ? wrapper->avPlayer() : 0; 
291 }
292
293 inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
294 {
295     return wrapper ? wrapper->avAsset() : 0; 
296 }
297
298 inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
299 {
300     return wrapper ? wrapper->avPlayerItem() : 0; 
301 }
302
303 inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
304 {
305     return wrapper ? wrapper->imageGenerator() : 0; 
306 }
307
308 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
309 inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
310 {
311     return wrapper ? wrapper->legibleOutput() : 0;
312 }
313
314 inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
315 {
316     return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
317 }
318 #endif
319
320 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
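// All resource loader delegate callbacks are funneled through a single serial dispatch queue, created
// lazily (and exactly once) below.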
321 static dispatch_queue_t globalQueue = nullptr;
322
323 static void initGlobalLoaderDelegateQueue(void* ctx)
324 {
325     globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
326 }
327
328 static dispatch_queue_t globalLoaderDelegateQueue()
329 {
330     static dispatch_once_t onceToken;
331
332     dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);
333
334     return globalQueue;
335 }
336 #endif
337
338 void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
339 {
340     if (isAvailable())
341         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationCF>(player); },
342             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
343 }
344
345 MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
346     : MediaPlayerPrivateAVFoundation(player)
347     , m_avfWrapper(0)
348     , m_videoFrameHasDrawn(false)
349 {
350     LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
351 }
352
353 MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
354 {
355     LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
356 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
357     for (auto& pair : m_resourceLoaderMap)
358         pair.value->invalidate();
359 #endif
360     cancelLoad();
361 }
362
363 void MediaPlayerPrivateAVFoundationCF::cancelLoad()
364 {
365     LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);
366
367     // Ignore the load state changes that will fire when cancelling pending loads invokes their completion handlers.
368     setDelayCallbacks(true);
369     setIgnoreLoadStateChanges(true);
370
371     tearDownVideoRendering();
372
373     clearTextTracks();
374
375     if (m_avfWrapper) {
376         // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to 
377         // disconnectAndDeleteAVFWrapper on that queue. 
378         m_avfWrapper->scheduleDisconnectAndDelete();
379         m_avfWrapper = 0;
380     }
381
382     setIgnoreLoadStateChanges(false);
383     setDelayCallbacks(false);
384 }
385
386 void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
387 {
388     ASSERT(supportsAcceleratedRendering());
389
390     if (m_avfWrapper)
391         m_avfWrapper->updateVideoLayerGravity();
392 }
393
394 bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
395 {
396     return videoLayer(m_avfWrapper);
397 }
398
399 bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
400 {
401     return imageGenerator(m_avfWrapper);
402 }
403
404 void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
405 {
406     LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);
407     ASSERT(isMainThread());
408
409     if (imageGenerator(m_avfWrapper))
410         return;
411
412     if (m_avfWrapper)
413         m_avfWrapper->createImageGenerator();
414 }
415
416 void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
417 {
418     ASSERT(isMainThread());
419     if (m_avfWrapper)
420         m_avfWrapper->destroyImageGenerator();
421 }
422
423 void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
424 {
425     ASSERT(isMainThread());
426     ASSERT(supportsAcceleratedRendering());
427
428     if (m_avfWrapper)
429         m_avfWrapper->createAVCFVideoLayer();
430 }
431
432 void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
433 {
434     ASSERT(isMainThread());
435     LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
436     if (m_avfWrapper)
437         m_avfWrapper->destroyVideoLayer();
438 }
439
440 bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
441 {
442     return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
443 }
444
445 void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
446 {
447     if (m_avfWrapper)
448         m_avfWrapper->setCurrentTextTrack(track);
449 }
450
451 InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
452 {
453     if (m_avfWrapper)
454         return m_avfWrapper->currentTextTrack();
455
456     return 0;
457 }
458
459 void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
460 {
461     ASSERT(!m_avfWrapper);
462
463     setDelayCallbacks(true);
464
465     bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");
466
467     m_avfWrapper = new AVFWrapper(this);
468     m_avfWrapper->createAssetForURL(url, inheritURI);
469     setDelayCallbacks(false);
470 }
471
472 void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
473 {
474     ASSERT(isMainThread());
475     ASSERT(m_avfWrapper);
476     
477     setDelayCallbacks(true);
478     m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
479     setDelayCallbacks(false);
480 }
481
482 void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
483 {
484     ASSERT(isMainThread());
485     ASSERT(m_avfWrapper);
486     
487     setDelayCallbacks(true);
488     m_avfWrapper->createPlayerItem();
489
490     setDelayCallbacks(false);
491 }
492
493 void MediaPlayerPrivateAVFoundationCF::checkPlayability()
494 {
495     ASSERT(m_avfWrapper);
496     m_avfWrapper->checkPlayability();
497 }
498
499 void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
500 {
501     ASSERT(m_avfWrapper);
502     m_avfWrapper->beginLoadingMetadata();
503 }
504
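// Map the AVCFPlayerItem status, plus its buffering flags, onto MediaPlayerPrivateAVFoundation::ItemStatus.
// The buffering checks are ordered from best case (playback likely to keep up) to worst (buffer empty); a
// ready item that matches none of them is reported as plain "ready to play".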
505 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
506 {
507     if (!avPlayerItem(m_avfWrapper))
508         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
509
510     AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
511     if (status == AVCFPlayerItemStatusUnknown)
512         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
513     if (status == AVCFPlayerItemStatusFailed)
514         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
515     if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
516         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
517     if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
518         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
519     if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
520         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
521     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
522 }
523
524 PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
525 {
526     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
527     PlatformMedia pm;
528     pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
529     pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
530     return pm;
531 }
532
533 PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
534 {
535     ASSERT(isMainThread());
536     if (!m_avfWrapper)
537         return 0;
538
539     return m_avfWrapper->platformLayer();
540 }
541
542 void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
543 {
544     ASSERT(isMainThread());
545     if (!m_avfWrapper)
546         return;
547     
548     // FIXME: On the Mac this is wrapped in a CATransaction. We need to figure out why that was done there
549     // and whether we're affected by the same issue.
550     setDelayCallbacks(true);
551     m_avfWrapper->setVideoLayerHidden(!isVisible);    
552     if (!isVisible)
553         tearDownVideoRendering();
554     setDelayCallbacks(false);
555 }
556
557 void MediaPlayerPrivateAVFoundationCF::platformPlay()
558 {
559     LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
560     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
561         return;
562
563     setDelayCallbacks(true);
564     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
565     setDelayCallbacks(false);
566 }
567
568 void MediaPlayerPrivateAVFoundationCF::platformPause()
569 {
570     LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
571     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
572         return;
573
574     setDelayCallbacks(true);
575     AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
576     setDelayCallbacks(false);
577 }
578
579 MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
580 {
581     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
582         return MediaTime::zeroTime();
583
584     CMTime cmDuration;
585
586     // Check the player item if we have one and it has loaded its duration; some assets never report a duration.
587     if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
588         cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
589     else
590         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
591
592     if (CMTIME_IS_NUMERIC(cmDuration))
593         return toMediaTime(cmDuration);
594
595     if (CMTIME_IS_INDEFINITE(cmDuration))
596         return MediaTime::positiveInfiniteTime();
597
598     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
599     return MediaTime::invalidTime();
600 }
601
602 MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
603 {
604     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
605         return MediaTime::zeroTime();
606
607     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
608     if (CMTIME_IS_NUMERIC(itemTime))
609         return max(toMediaTime(itemTime), MediaTime::zeroTime());
610
611     return MediaTime::zeroTime();
612 }
613
614 void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
615 {
616     if (!m_avfWrapper)
617         return;
618     
619     // seekToTime generates several event callbacks; delay them and update afterwards.
620     setDelayCallbacks(true);
621     m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
622     setDelayCallbacks(false);
623 }
624
625 void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
626 {
627     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
628         return;
629
630     AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
631 }
632
633 void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
634 {
635     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
636         return;
637
638     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
639     AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
640 }
641
642 void MediaPlayerPrivateAVFoundationCF::setRate(float rate)
643 {
644     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setRate(%p) - rate: %f", this, rate);
645     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
646         return;
647
648     setDelayCallbacks(true);
649     AVCFPlayerSetRate(avPlayer(m_avfWrapper), rate);
650     setDelayCallbacks(false);
651 }
652
653 double MediaPlayerPrivateAVFoundationCF::rate() const
654 {
655     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
656         return 0;
657
658     setDelayCallbacks(true);
659     double currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
660     setDelayCallbacks(false);
661
662     return currentRate;
663 }
664
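// Loaded and seekable ranges arrive as CFDictionaries of CMTimes (see CMTimeRangeStartKey and
// CMTimeRangeDurationKey above). A range is only usable when both times are valid and the duration has no
// epoch, is not negative, and is not zero.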
665 static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
666 {
667     // Is the range valid?
668     if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
669         return false;
670
671     if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
672         return false;
673
674     return true;
675 }
676
677 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
678 {
679     auto timeRanges = std::make_unique<PlatformTimeRanges>();
680
681     if (!avPlayerItem(m_avfWrapper))
682         return timeRanges;
683
684     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
685     if (!loadedRanges)
686         return timeRanges;
687
688     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
689     for (CFIndex i = 0; i < rangeCount; i++) {
690         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
691         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
692         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
693         
694         if (timeRangeIsValidAndNotEmpty(start, duration)) {
695             MediaTime rangeStart = toMediaTime(start);
696             MediaTime rangeEnd = rangeStart + toMediaTime(duration);
697             timeRanges->add(rangeStart, rangeEnd);
698         }
699     }
700
701     return timeRanges;
702 }
703
704 MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
705 {
706     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
707     if (!seekableRanges) 
708         return MediaTime::zeroTime(); 
709
710     MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
711     bool hasValidRange = false; 
712     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
713     for (CFIndex i = 0; i < rangeCount; i++) {
714         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
715         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
716         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
717         if (!timeRangeIsValidAndNotEmpty(start, duration))
718             continue;
719
720         hasValidRange = true; 
721         MediaTime startOfRange = toMediaTime(start); 
722         if (minTimeSeekable > startOfRange) 
723             minTimeSeekable = startOfRange; 
724     } 
725     return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
726 }
727
728 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
729 {
730     if (!avPlayerItem(m_avfWrapper))
731         return MediaTime::zeroTime();
732
733     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
734     if (!seekableRanges)
735         return MediaTime::zeroTime();
736
737     MediaTime maxTimeSeekable;
738     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
739     for (CFIndex i = 0; i < rangeCount; i++) {
740         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
741         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
742         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
743         if (!timeRangeIsValidAndNotEmpty(start, duration))
744             continue;
745         
746         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
747         if (maxTimeSeekable < endOfRange)
748             maxTimeSeekable = endOfRange;
749     }
750
751     return maxTimeSeekable;   
752 }
753
754 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
755 {
756     if (!avPlayerItem(m_avfWrapper))
757         return MediaTime::zeroTime();
758
759     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
760     if (!loadedRanges)
761         return MediaTime::zeroTime();
762
763     MediaTime maxTimeLoaded;
764     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
765     for (CFIndex i = 0; i < rangeCount; i++) {
766         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
767         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
768         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
769         if (!timeRangeIsValidAndNotEmpty(start, duration))
770             continue;
771         
772         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
773         if (maxTimeLoaded < endOfRange)
774             maxTimeLoaded = endOfRange;
775     }
776
777     return maxTimeLoaded;   
778 }
779
780 unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
781 {
782     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
783         return 0;
784
785     int64_t totalMediaSize = 0;
786     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
787     CFIndex trackCount = CFArrayGetCount(tracks.get());
788     for (CFIndex i = 0; i < trackCount; i++) {
789         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
790         totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
791     }
792
793     return static_cast<unsigned long long>(totalMediaSize);
794 }
795
796 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
797 {
798     if (!avAsset(m_avfWrapper))
799         return MediaPlayerAVAssetStatusDoesNotExist;
800
801     // First, make sure all metadata properties we rely on are loaded.
802     CFArrayRef keys = metadataKeyNames();
803     CFIndex keyCount = CFArrayGetCount(keys);
804     for (CFIndex i = 0; i < keyCount; i++) {
805         CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
806         AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);
807
808         if (keyStatus < AVCFPropertyValueStatusLoaded)
809             return MediaPlayerAVAssetStatusLoading;
810         if (keyStatus == AVCFPropertyValueStatusFailed) {
811             if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
812                 // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
813                 // <rdar://problem/15966685>
814                 continue;
815             }
816 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
817             if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
818                 // On Windows, the media selection options are not available when initially interacting with a streaming source.
819                 // <rdar://problem/16160699>
820                 continue;
821             }
822 #endif
823             return MediaPlayerAVAssetStatusFailed;
824         }
825         if (keyStatus == AVCFPropertyValueStatusCancelled)
826             return MediaPlayerAVAssetStatusCancelled;
827     }
828
829     if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
830         return MediaPlayerAVAssetStatusPlayable;
831
832     return MediaPlayerAVAssetStatusLoaded;
833 }
834
835 void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
836 {
837     ASSERT(isMainThread());
838     if (!metaDataAvailable() || context.paintingDisabled())
839         return;
840
841     if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
842         // We're being told to render into a context, but we already have the
843         // video layer, which probably means we've been called from <canvas>.
844         createContextVideoRenderer();
845     }
846
847     paint(context, rect);
848 }
849
850 void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext& context, const FloatRect& rect)
851 {
852     ASSERT(isMainThread());
853     if (!metaDataAvailable() || context.paintingDisabled() || !imageGenerator(m_avfWrapper))
854         return;
855
856     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
857
858     setDelayCallbacks(true);
859     RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
860     if (image) {
861         context.save();
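        // CGContextDrawImage draws with a bottom-left origin, so flip the destination vertically before
        // drawing to keep the video frame right side up.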
862         context.translate(rect.x(), rect.y() + rect.height());
863         context.scale(FloatSize(1.0f, -1.0f));
864         context.setImageInterpolationQuality(InterpolationLow);
865         FloatRect paintRect(FloatPoint(), rect.size());
866         CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
867         context.restore();
868         image = 0;
869     }
870     setDelayCallbacks(false);
871     
872     m_videoFrameHasDrawn = true;
873 }
874
875 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
876
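// "com.apple.fps" and "com.apple.fps.1_0" identify FairPlay Streaming, the only key system this port supports.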
877 static bool keySystemIsSupported(const String& keySystem)
878 {
879     return equalLettersIgnoringASCIICase(keySystem, "com.apple.fps")
880         || equalLettersIgnoringASCIICase(keySystem, "com.apple.fps.1_0");
881 }
882
883 #endif
884
885 static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
886 {
887     static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []() {
888         HashSet<String, ASCIICaseInsensitiveHash> types;
889         RetainPtr<CFArrayRef> avTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
890         CFIndex typeCount = CFArrayGetCount(avTypes.get());
891         for (CFIndex i = 0; i < typeCount; ++i)
892             types.add((CFStringRef)CFArrayGetValueAtIndex(avTypes.get(), i));
893         return types;
894     }();
895
896     return cache;
897 }
898
899 void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
900 {
901     supportedTypes = avfMIMETypes();
902 }
903
904 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
905 {
906     if (isUnsupportedMIMEType(parameters.type))
907         return MediaPlayer::IsNotSupported;
908
909     if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
910         return MediaPlayer::IsNotSupported;
911
912 #if HAVE(AVCFURL_PLAYABLE_MIMETYPE)
913     // The spec says:
914     // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
915     if (parameters.codecs.isEmpty())
916         return MediaPlayer::MayBeSupported;
917
918     String typeString = parameters.type + "; codecs=\"" + parameters.codecs + "\"";
919     return AVCFURLAssetIsPlayableExtendedMIMEType(typeString.createCFString().get()) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
920 #else
921     if (avfMIMETypes().contains(parameters.type))
922         return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
923     return MediaPlayer::IsNotSupported;
924 #endif
925 }
926
927 bool MediaPlayerPrivateAVFoundationCF::supportsKeySystem(const String& keySystem, const String& mimeType)
928 {
929 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
930     if (keySystem.isEmpty())
931         return false;
932
933     if (!keySystemIsSupported(keySystem))
934         return false;
935
936     if (!mimeType.isEmpty() && !avfMIMETypes().contains(mimeType))
937         return false;
938
939     return true;
940 #else
941     UNUSED_PARAM(keySystem);
942     UNUSED_PARAM(mimeType);
943     return false;
944 #endif
945 }
946
947 bool MediaPlayerPrivateAVFoundationCF::isAvailable()
948 {
949     return AVFoundationCFLibrary() && isCoreMediaFrameworkAvailable();
950 }
951
952 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
953 void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
954 {
955     WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);
956
957     if (resourceLoader)
958         resourceLoader->stopLoading();
959 }
960
961 void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
962 {
963     m_resourceLoaderMap.remove(avRequest);
964 }
965 #endif
966
967 MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
968 {
969     if (!metaDataAvailable())
970         return timeValue;
971
972     // FIXME: Cannot implement until rdar://8721669 is fixed.
973     return timeValue;
974 }
975
976 void MediaPlayerPrivateAVFoundationCF::tracksChanged()
977 {
978     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
979     m_languageOfPrimaryAudioTrack = String();
980
981     if (!avAsset(m_avfWrapper))
982         return;
983
984     setDelayCharacteristicsChangedNotification(true);
985
986     bool haveCCTrack = false;
987     bool hasCaptions = false;
988
989     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
990     // asked about those fairly frequently.
991     if (!avPlayerItem(m_avfWrapper)) {
992         // We don't have a player item yet, so check with the asset because some assets support inspection
993         // prior to becoming ready to play.
994         RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
995         setHasVideo(CFArrayGetCount(visualTracks.get()));
996
997         RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
998         setHasAudio(CFArrayGetCount(audioTracks.get()));
999
1000 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1001         RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
1002         hasCaptions = CFArrayGetCount(captionTracks.get());
1003 #endif
1004     } else {
1005         bool hasVideo = false;
1006         bool hasAudio = false;
1007
1008         RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1009
1010         CFIndex trackCount = CFArrayGetCount(tracks.get());
1011         for (CFIndex i = 0; i < trackCount; i++) {
1012             AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1013             
1014             if (AVCFPlayerItemTrackIsEnabled(track)) {
1015                 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
1016                 if (!assetTrack) {
1017                     // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1018                     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %p is enabled, but has no asset track.", this, track);
1019                     continue;
1020                 }
1021                 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1022                 if (!mediaType)
1023                     continue;
1024                 
1025                 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1026                     hasVideo = true;
1027                 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1028                     hasAudio = true;
1029                 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
1030 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1031                     hasCaptions = true;
1032 #endif
1033                     haveCCTrack = true;
1034                 }
1035             }
1036         }
1037
1038         setHasVideo(hasVideo);
1039         setHasAudio(hasAudio);
1040     }
1041
1042 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1043     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1044     if (legibleGroup) {
1045         RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1046         hasCaptions = CFArrayGetCount(playableOptions.get());
1047         if (hasCaptions)
1048             processMediaSelectionOptions();
1049     }
1050 #endif
1051
1052 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1053     if (haveCCTrack)
1054         processLegacyClosedCaptionsTracks();
1055 #endif
1056
1057     setHasClosedCaptions(hasCaptions);
1058
1059     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 
1060         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1061
1062     sizeChanged();
1063
1064     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1065         characteristicsChanged();
1066
1067     setDelayCharacteristicsChangedNotification(false);
1068 }
1069
1070 void MediaPlayerPrivateAVFoundationCF::sizeChanged()
1071 {
1072     ASSERT(isMainThread());
1073     if (!avAsset(m_avfWrapper))
1074         return;
1075     
1076     // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
1077     // the union of all visual track rects.
1078     CGRect trackRectUnion = CGRectZero;
1079     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1080     CFIndex trackCount = CFArrayGetCount(tracks.get());
1081     for (CFIndex i = 0; i < trackCount; i++) {
1082         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1083         
1084         CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack);
1085         CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
1086         trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack)));
1087     }
1088     // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
1089     trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y);
1090     CGSize naturalSize = trackRectUnion.size;
1091
1092     if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper))
1093         naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper));
1094
1095     // Also look at the asset's preferred transform so we account for a movie matrix.
1096     CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper)));
1097     if (movieSize.width > naturalSize.width)
1098         naturalSize.width = movieSize.width;
1099     if (movieSize.height > naturalSize.height)
1100         naturalSize.height = movieSize.height;
1101     setNaturalSize(IntSize(naturalSize));
1102 }
1103
1104 bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const
1105 {
1106     // The AVFoundationCF player needs to have the root compositor available at construction time
1107     // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode.
1108     //
1109     // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode
1110     // when asked, then we could follow AVFoundation's model and switch to compositing
1111     // mode when beginning to play media.
1112     return true;
1113 }
1114
1115 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
1116 RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI)
1117 {
1118     if (!m_avfWrapper)
1119         return nullptr;
1120
1121     return m_avfWrapper->takeRequestForKeyURI(keyURI);
1122 }
1123
1124 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem, CDMSessionClient* client)
1125 {
1126     if (!keySystemIsSupported(keySystem))
1127         return nullptr;
1128
1129     return std::make_unique<CDMSessionAVFoundationCF>(this, client);
1130 }
1131 #elif ENABLE(ENCRYPTED_MEDIA_V2)
1132 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String&, CDMSessionClient*)
1133 {
1134     return nullptr;
1135 }
1136 #endif
1137
1138 long MediaPlayerPrivateAVFoundationCF::assetErrorCode() const
1139 {
1140     if (!avAsset(m_avfWrapper))
1141         return 0;
1142
1143     CFErrorRef error = nullptr;
1144     AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), AVCFAssetPropertyPlayable, &error);
1145     if (!error)
1146         return 0;
1147
1148     long code = CFErrorGetCode(error);
1149     CFRelease(error);
1150     return code;
1151 }
1152
1153 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
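// Without legible output support, closed captions are discovered by walking the player item's tracks for
// legacy closed-caption media and wrapping each one in an InbandTextTrackPrivateLegacyAVCF.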
1154 void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks()
1155 {
1156 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1157     AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper));
1158 #endif
1159
1160     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1161     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1162     CFIndex trackCount = CFArrayGetCount(tracks.get());
1163     for (CFIndex i = 0; i < trackCount; ++i) {
1164         AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1165
1166         RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack));
1167         if (!assetTrack) {
1168             // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1169             LOG(Media, "MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks(%p) - track = %p is enabled, but has no asset track.", this, playerItemTrack);
1170             continue;
1171         }
1172         CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1173         if (!mediaType)
1174             continue;
1175                 
1176         if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo)
1177             continue;
1178
1179         bool newCCTrack = true;
1180         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1181             if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
1182                 continue;
1183
1184             RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get());
1185             if (track->avPlayerItemTrack() == playerItemTrack) {
1186                 removedTextTracks.remove(i - 1);
1187                 newCCTrack = false;
1188                 break;
1189             }
1190         }
1191
1192         if (!newCCTrack)
1193             continue;
1194         
1195         m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack));
1196     }
1197
1198     processNewAndRemovedTextTracks(removedTextTracks);
1199 }
1200 #endif
1201
1202 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1203 void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions()
1204 {
1205     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1206     if (!legibleGroup) {
1207         LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
1208         return;
1209     }
1210
1211     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
1212     // but set the selected legible track to nil so text tracks will not be automatically configured.
1213     if (!m_textTracks.size() && AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)) {
1214         if (AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper))
1215             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup);
1216     }
1217
1218     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1219     RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1220     CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get());
1221     for (CFIndex i = 0; i < legibleOptionsCount; ++i) {
1222         AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i));
1223         bool newTrack = true;
1224         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1225             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1226                 continue;
1227
1228             RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get());
1229             if (CFEqual(track->mediaSelectionOption(), option)) {
1230                 removedTextTracks.remove(i - 1);
1231                 newTrack = false;
1232                 break;
1233             }
1234         }
1235         if (!newTrack)
1236             continue;
1237
1238         m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic));
1239     }
1240
1241     processNewAndRemovedTextTracks(removedTextTracks);
1242 }
1243
1244 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1245
1246 void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
1247 {
1248     if (m_currentTextTrack == track)
1249         return;
1250
1251     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
1252         
1253     m_currentTextTrack = track;
1254
1255     if (track) {
1256         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1257             AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE);
1258 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1259         else
1260             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia());
1261 #endif
1262     } else {
1263 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1264         AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia());
1265 #endif
1266         AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE);
1267     }
1268 }
1269
1270 String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const
1271 {
1272     if (!m_languageOfPrimaryAudioTrack.isNull())
1273         return m_languageOfPrimaryAudioTrack;
1274
1275     if (!avPlayerItem(m_avfWrapper))
1276         return emptyString();
1277
1278 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1279     // If AVFoundation has an audible group, return the language of the currently selected audible option.
1280     AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible);
1281     AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup);
1282     if (currentlySelectedAudibleOption) {
1283         RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption));
1284         if (audibleOptionLocale)
1285             m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get());
1286         else
1287             m_languageOfPrimaryAudioTrack = emptyString();
1288
1289         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1290
1291         return m_languageOfPrimaryAudioTrack;
1292     }
1293 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1294
1295     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
1296     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
1297     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio));
1298     CFIndex trackCount = tracks ? CFArrayGetCount(tracks.get()) : 0;
1299     if (trackCount != 1) {
1300         m_languageOfPrimaryAudioTrack = emptyString();
1301         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? trackCount : 0));
1302         return m_languageOfPrimaryAudioTrack;
1303     }
1304
1305     AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0);
1306     RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track));
1307
1308     // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full
1309     // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the
1310     // ISO 639-2/T language code so check it.
1311     if (!language)
1312         language = adoptCF(AVCFAssetTrackCopyLanguageCode(track));
1313
1314     // Some legacy tracks have "und" as a language, treat that the same as no language at all.
1315     if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) {
1316         m_languageOfPrimaryAudioTrack = language.get();
1317         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1318         return m_languageOfPrimaryAudioTrack;
1319     }
1320
1321     LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
1322     m_languageOfPrimaryAudioTrack = emptyString();
1323     return m_languageOfPrimaryAudioTrack;
1324 }
1325
1326 void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay()
1327 {
1328     if (m_avfWrapper)
1329         m_avfWrapper->setVideoLayerNeedsCommit();
1330 }
1331
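// AVFWrapper bridges the main-thread MediaPlayerPrivateAVFoundationCF object and the AVFoundationCF
// callbacks that arrive on background dispatch queues. Instead of handing AVFoundationCF a raw pointer,
// each wrapper registers a small integer object ID in a global map (see addToMap() and
// avfWrapperForCallbackContext()), so callbacks that arrive after the wrapper has been torn down can be
// detected and ignored. A rough sketch of how an owner drives it (hypothetical driver code; the ordering
// is implied by the ASSERTs in createPlayerItem() and createPlayer()):
//
//     AVFWrapper* wrapper = new AVFWrapper(owner);
//     wrapper->createAssetForURL(url, false);
//     wrapper->createPlayerItem();            // requires the asset
//     wrapper->createPlayer(d3dDevice);       // requires the player item
//     ...
//     wrapper->scheduleDisconnectAndDelete(); // deletion is deferred to the main thread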
1332 AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner)
1333     : m_owner(owner)
1334     , m_objectID(s_nextAVFWrapperObjectID++)
1335     , m_currentTextTrack(0)
1336 {
1337     ASSERT(isMainThread());
1338     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1339     LOG(Media, "AVFWrapper::AVFWrapper(%p)", this);
1340
1341     m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0);
1342
1343 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1344     m_resourceLoaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1;
1345     m_resourceLoaderCallbacks.context = nullptr;
1346     m_resourceLoaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource;
1347 #endif
1348
1349     addToMap();
1350 }
1351
1352 AVFWrapper::~AVFWrapper()
1353 {
1354     ASSERT(isMainThread());
1355     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1356     LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);
1357
1358     destroyVideoLayer();
1359     destroyImageGenerator();
1360
1361     if (m_notificationQueue)
1362         dispatch_release(m_notificationQueue);
1363
1364     if (avAsset()) {
1365         AVCFAssetCancelLoading(avAsset());
1366         m_avAsset = 0;
1367     }
1368
1369 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1370     if (legibleOutput()) {
1371         if (avPlayerItem())
1372             AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
1373         m_legibleOutput = 0;
1374     }
1375 #endif
1376
1377     m_avPlayerItem = 0;
1378     m_timeObserver = 0;
1379     m_avPlayer = 0;
1380 }
1381
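// The map from object ID to live AVFWrapper, and the lock guarding it, are created on first use and
// intentionally never destroyed, so late callbacks can still consult them safely. Every access to map()
// must be made while holding mapLock().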
1382 Lock& AVFWrapper::mapLock()
1383 {
1384     static Lock mapLock;
1385     return mapLock;
1386 }
1387
1388 HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
1389 {
1390     static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>;
1391     return map;
1392 }
1393
1394 void AVFWrapper::addToMap()
1395 {
1396     LockHolder locker(mapLock());
1397     
1398     // HashMap doesn't allow a key of 0, and we also need to make sure we
1399     // aren't reusing an object ID that's already in the map.
1400     while (!m_objectID || (map().find(m_objectID) != map().end()))
1401         m_objectID = s_nextAVFWrapperObjectID++;
1402        
1403     LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);
1404
1405     map().add(m_objectID, this);
1406 }
1407
1408 void AVFWrapper::removeFromMap() const
1409 {
1410     LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);
1411
1412     LockHolder locker(mapLock());
1413     map().remove(m_objectID);
1414 }
1415
1416 AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
1417 {
1418     // Assumes caller has locked mapLock().
1419     HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
1420     if (it == map().end())
1421         return 0;
1422
1423     return it->value;
1424 }
1425
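// Teardown happens in three steps: the wrapper is first removed from the map on the main thread (so any
// callback that races with destruction finds nothing and bails out), the AVFoundationCF observers are then
// removed on the wrapper's dispatch queue, and finally the wrapper itself is deleted back on the main
// thread, because the AVCF objects must be released on the thread that created them.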
1426 void AVFWrapper::scheduleDisconnectAndDelete()
1427 {
1428     // Ignore any subsequent notifications we might receive in notificationCallback().
1429     removeFromMap();
1430
1431     dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
1432 }
1433
1434 static void destroyAVFWrapper(void* context)
1435 {
1436     ASSERT(isMainThread());
1437     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1438     if (!avfWrapper)
1439         return;
1440
1441     delete avfWrapper;
1442 }
1443
1444 void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
1445 {
1446     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1447
1448     LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);
1449
1450     if (avfWrapper->avPlayerItem()) {
1451         CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1452         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
1453         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
1454         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
1455         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1456         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1457         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
1458         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
1459         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
1460         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
1461         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
1462         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
1463     }
1464
1465     if (avfWrapper->avPlayer()) {
1466         if (avfWrapper->timeObserver())
1467             AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());
1468
1469         CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
1470     }
1471
1472 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1473     AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
1474 #endif
1475
1476     // We must release the AVCFPlayer and other items on the same thread that created them.
1477     dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
1478 }
1479
1480 void AVFWrapper::createAssetForURL(const String& url, bool inheritURI)
1481 {
1482     ASSERT(!avAsset());
1483
1484     RetainPtr<CFURLRef> urlRef = URL(ParsedURLString, url).createCFURL();
1485
1486     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1487
1488     if (inheritURI)
1489         CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);
1490
1491     m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue));
1492
1493 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1494     ASSERT(callbackContext());
1495     m_resourceLoaderCallbacks.context = callbackContext();
1496
1497     AVCFAssetResourceLoaderRef resourceLoader = AVCFURLAssetGetResourceLoader(m_avAsset.get());
1498     AVCFAssetResourceLoaderSetCallbacks(resourceLoader, &m_resourceLoaderCallbacks, globalLoaderDelegateQueue());
1499 #endif
1500 }
1501
1502 void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
1503 {
1504     ASSERT(isMainThread());
1505     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1506     ASSERT(avPlayerItem());
1507
1508     if (avPlayer())
1509         return;
1510
1511     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1512
1513     if (d3dDevice) {
1514         // QI for an IDirect3DDevice9Ex interface; it is required for hardware video decoding.
1515         COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
1516         m_d3dDevice = d3dEx;
1517     } else
1518         m_d3dDevice = 0;
1519
1520     if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
1521         CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);
1522
1523 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1524     CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
1525 #endif
1526
1527     // FIXME: We need a way to create an AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
1528     AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
1529     m_avPlayer = adoptCF(playerRef);
1530 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1531     AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
1532 #endif
1533
1534     if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
1535         AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());
1536
1537     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1538     ASSERT(center);
1539
1540     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1541
1542     // Add a time observer and ask to be called only very infrequently: we don't really want periodic callbacks,
1543     // but the observer is also invoked whenever a seek happens, which is what we care about.
1544     const double veryLongInterval = 60*60*60*24*30;
1545     m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
1546 }
1547
1548 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1549 static RetainPtr<CFArrayRef> createLegibleOutputSubtypes()
1550 {
1551     int webVTTInt = 'wvtt'; // kCMSubtitleFormatType_WebVTT;
1552     RetainPtr<CFNumberRef> webVTTNumber = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &webVTTInt));
1553     CFTypeRef formatTypes[] = { webVTTNumber.get() };
1554     return adoptCF(CFArrayCreate(0, formatTypes, WTF_ARRAY_LENGTH(formatTypes), &kCFTypeArrayCallBacks));
1555 }
1556 #endif
1557
1558 void AVFWrapper::createPlayerItem()
1559 {
1560     ASSERT(isMainThread());
1561     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1562     ASSERT(avAsset());
1563
1564     if (avPlayerItem())
1565         return;
1566
1567     // Create the player item so we begin loading media data.
1568     AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
1569     m_avPlayerItem = adoptCF(itemRef);
1570
1571     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1572     ASSERT(center);
1573
1574     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1575     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1576     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1577     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1578     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1579     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1580     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1581     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1582     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1583     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1584     // FIXME: Are there other legible output things we need to register for? asset and hasEnabledAudio are not exposed by AVCF
1585
1586     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);
1587
1588 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1589     const CFTimeInterval legibleOutputAdvanceInterval = 2;
1590
1591     m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, createLegibleOutputSubtypes().get()));
1592     AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);
1593
1594     AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
1595 #if HAVE(AVCFPLAYERITEM_CALLBACK_VERSION_2)
1596     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_2;
1597 #else
1598     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
1599 #endif
1600     ASSERT(callbackContext());
1601     callbackInfo.context = callbackContext();
1602     callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;
1603
1604     AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
1605     AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
1606     AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
1607     AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
1608 #endif
1609 }
1610
1611 void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
1612 {
1613     LockHolder locker(mapLock());
1614     AVFWrapper* self = avfWrapperForCallbackContext(context);
1615     if (!self) {
1616         LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1617         return;
1618     }
1619
1620     double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported.
1621     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
1622 }
1623
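// AVCF notifications are delivered on the notification dispatch queue. notificationCallback() packages the
// notification name and callback context into a heap-allocated NotificationCallbackData and bounces it to
// the main thread, where processNotification() maps it onto the owner's
// MediaPlayerPrivateAVFoundation::Notification values.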
1624 struct NotificationCallbackData {
1625     RetainPtr<CFStringRef> m_propertyName;
1626     void* m_context;
1627
1628     NotificationCallbackData(CFStringRef propertyName, void* context)
1629         : m_propertyName(propertyName), m_context(context)
1630     {
1631     }
1632 };
1633
1634 void AVFWrapper::processNotification(void* context)
1635 {
1636     ASSERT(isMainThread());
1637     ASSERT(context);
1638
1639     if (!context)
1640         return;
1641
1642     std::unique_ptr<NotificationCallbackData> notificationData { static_cast<NotificationCallbackData*>(context) };
1643
1644     LockHolder locker(mapLock());
1645     AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
1646     if (!self) {
1647         LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1648         return;
1649     }
1650
1651     CFStringRef propertyName = notificationData->m_propertyName.get();
1652
1653     if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
1654         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1655     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1656         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1657     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
1658         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1659         if (asset)
1660             self->setAsset(asset);
1661         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1662     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1663         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1664     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1665         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1666     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1667         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1668     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1669         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1670     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1671         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1672     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1673         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1674     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1675         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1676     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1677         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1678     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1679         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1680     else
1681         ASSERT_NOT_REACHED();
1682 }
1683
1684 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1685 {
1686 #if !LOG_DISABLED
1687     char notificationName[256];
1688     CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
1689     LOG(Media, "AVFWrapper::notificationCallback(id=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1690 #endif
1691
1692     auto notificationData = std::make_unique<NotificationCallbackData>(propertyName, observer);
1693
1694     dispatch_async_f(dispatch_get_main_queue(), notificationData.release(), processNotification);
1695 }
1696
1697 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1698 {
1699     LockHolder locker(mapLock());
1700     AVFWrapper* self = avfWrapperForCallbackContext(context);
1701     if (!self) {
1702         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1703         return;
1704     }
1705
1706     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1707     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1708 }
1709
1710 void AVFWrapper::checkPlayability()
1711 {
1712     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1713
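    // The property-name array below is created lazily and never released; the unsynchronized check assumes
    // checkPlayability() is only ever called from one (the main) thread.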
1714     static CFArrayRef propertyKeyName;
1715     if (!propertyKeyName) {
1716         static const CFStringRef keyNames[] = { 
1717             AVCFAssetPropertyPlayable
1718         };
1719         propertyKeyName = CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
1720     }
1721
1722     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1723 }
1724
1725 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1726 {
1727     LockHolder locker(mapLock());
1728     AVFWrapper* self = avfWrapperForCallbackContext(context);
1729     if (!self) {
1730         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1731         return;
1732     }
1733
1734     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1735     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1736 }
1737
1738 void AVFWrapper::beginLoadingMetadata()
1739 {
1740     ASSERT(avAsset());
1741     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1742     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1743 }
1744
1745 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1746 {
1747     LockHolder locker(mapLock());
1748     AVFWrapper* self = avfWrapperForCallbackContext(context);
1749     if (!self) {
1750         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1751         return;
1752     }
1753
1754     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1755     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1756 }
1757
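// Seeking is asynchronous: seekCompletedCallback() is invoked on the notification queue once AVCF finishes
// (or abandons) the seek, and forwards the 'finished' flag to the main thread as a SeekCompleted notification.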
1758 void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
1759 {
1760     ASSERT(avPlayerItem());
1761     CMTime cmTime = toCMTime(time);
1762     CMTime cmBefore = toCMTime(negativeTolerance);
1763     CMTime cmAfter = toCMTime(positiveTolerance);
1764     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1765 }
1766
1767 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
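// Legible (caption/subtitle) output follows the same marshalling pattern as notifications: the AVCF legible
// output callback fires off the main thread, the attributed strings, native samples and item time are
// captured in a LegibleOutputData, and processCue() hands them to the current in-band text track on the
// main thread.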
1768 struct LegibleOutputData {
1769     RetainPtr<CFArrayRef> m_attributedStrings;
1770     RetainPtr<CFArrayRef> m_samples;
1771     MediaTime m_time;
1772     void* m_context;
1773
1774     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
1775         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1776     {
1777     }
1778 };
1779
1780 void AVFWrapper::processCue(void* context)
1781 {
1782     ASSERT(isMainThread());
1783     ASSERT(context);
1784
1785     if (!context)
1786         return;
1787
1788     std::unique_ptr<LegibleOutputData> legibleOutputData(reinterpret_cast<LegibleOutputData*>(context));
1789
1790     LockHolder locker(mapLock());
1791     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1792     if (!self) {
1793         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1794         return;
1795     }
1796
1797     if (!self->m_currentTextTrack)
1798         return;
1799
1800     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1801 }
1802
1803 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1804 {
1805     ASSERT(!isMainThread());
1806     LockHolder locker(mapLock());
1807     AVFWrapper* self = avfWrapperForCallbackContext(context);
1808     if (!self) {
1809         LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1810         return;
1811     }
1812
1813     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1814
1815     ASSERT(legibleOutput == self->m_legibleOutput);
1816
1817     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, toMediaTime(itemTime), context);
1818
1819     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1820 }
1821 #endif
1822
1823 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
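// Resource-loader requests also arrive off the main thread. resourceLoaderShouldWaitForLoadingOfRequestedResource()
// answers 'true' immediately and forwards the request to the main thread; processShouldWaitForLoadingOfResource()
// then either starts a WebCoreAVCFResourceLoader (or records a key request) or finishes the request with a
// CFNetwork error if the wrapper is gone or loading cannot proceed.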
1824 struct LoadRequestData {
1825     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1826     void* m_context;
1827
1828     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1829         : m_request(request), m_context(context)
1830     {
1831     }
1832 };
1833
1834 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1835 {
1836     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1837     ASSERT(context);
1838
1839     if (!context)
1840         return;
1841
1842     std::unique_ptr<LoadRequestData> loadRequestData(reinterpret_cast<LoadRequestData*>(context));
1843
1844     LockHolder locker(mapLock());
1845     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1846     if (!self) {
1847         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1848         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1849         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1850         return;
1851     }
1852
1853     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get())) {
1854         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1855         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1856     }
1857 }
1858
1859 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1860 {
1861 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
1862     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1863     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1864     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1865     String scheme = schemeRef.get();
1866
1867     if (scheme == "skd") {
1868         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1869         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1870         String keyURI = keyURIRef.get();
1871
1872         // Create an initData with the following layout:
1873         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
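        // For example, a (hypothetical) key URI of "skd://host/key" is 14 UTF-16 code units, so keyURISize
        // is 28 and the buffer is 32 bytes: a little-endian 28 followed by the UTF-16 text of the URI.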
1874         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1875         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1876         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
1877         initDataView->set<uint32_t>(0, keyURISize, true);
1878
1879         RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
1880         keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length(), 0);
1881
1882         RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
1883         if (!m_owner->player()->keyNeeded(initData.get()))
1884             return false;
1885
1886         setRequestForKey(keyURI, avRequest);
1887         return true;
1888     }
1889 #endif
1890
1891     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1892     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1893     resourceLoader->startLoading();
1894     return true;
1895 }
1896
1897 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1898 {
1899     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1900     LockHolder locker(mapLock());
1901     AVFWrapper* self = avfWrapperForCallbackContext(context);
1902     if (!self) {
1903         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1904         return false;
1905     }
1906
1907     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1908
1909     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1910
1911     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1912
1913     return true;
1914 }
1915 #endif
1916
1917 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1918 {
1919     if (asset == avAsset())
1920         return;
1921
1922     AVCFAssetCancelLoading(avAsset());
1923     m_avAsset = asset;
1924 }
1925
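// The AVCFPlayerLayer's CACFLayer is not handed to the compositor directly; it is wrapped in a
// PlatformCALayerWin whose LayerClient resizes and repositions the video layer whenever the wrapper layer
// is laid out (see LayerClient::platformCALayerLayoutSublayersOfLayer() at the bottom of this file).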
1926 PlatformLayer* AVFWrapper::platformLayer()
1927 {
1928     ASSERT(isMainThread());
1929     if (m_videoLayerWrapper)
1930         return m_videoLayerWrapper->platformLayer();
1931
1932     if (!videoLayer())
1933         return 0;
1934
1935     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1936     m_layerClient = std::make_unique<LayerClient>(this);
1937     if (!m_layerClient)
1938         return 0;
1939
1940     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1941     if (!m_videoLayerWrapper)
1942         return 0;
1943
1944     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1945
1946     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1947     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1948     m_videoLayerWrapper->setNeedsLayout();
1949     updateVideoLayerGravity();
1950
1951     return m_videoLayerWrapper->platformLayer();
1952 }
1953
1954 void AVFWrapper::createAVCFVideoLayer()
1955 {
1956     ASSERT(isMainThread());
1957     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1958     if (!avPlayer() || m_avCFVideoLayer)
1959         return;
1960
1961     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1962     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1963     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1964 }
1965
1966 void AVFWrapper::destroyVideoLayer()
1967 {
1968     ASSERT(isMainThread());
1969     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1970     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1971     m_layerClient = nullptr;
1972     m_caVideoLayer = nullptr;
1973     m_videoLayerWrapper = nullptr;
1974     if (!m_avCFVideoLayer.get())
1975         return;
1976
1977     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), nullptr);
1978     m_avCFVideoLayer = nullptr;
1979 }
1980
1981 void AVFWrapper::setVideoLayerNeedsCommit()
1982 {
1983     if (m_videoLayerWrapper)
1984         m_videoLayerWrapper->setNeedsCommit();
1985 }
1986
1987 void AVFWrapper::setVideoLayerHidden(bool value)
1988 {
1989     if (m_videoLayerWrapper)
1990         m_videoLayerWrapper->setHidden(value);
1991 }
1992
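// The image generator is configured with zero tolerance on both sides so that a copied frame corresponds to
// the exact requested time; createImageForTimeInRect() below presumably backs software painting and
// snapshotting in the owner.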
1993 void AVFWrapper::createImageGenerator()
1994 {
1995     ASSERT(isMainThread());
1996     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1997     if (!avAsset() || m_imageGenerator)
1998         return;
1999
2000     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
2001
2002     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
2003     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
2004     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
2005     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
2006
2007     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
2008 }
2009
2010 void AVFWrapper::destroyImageGenerator()
2011 {
2012     ASSERT(isMainThread());
2013     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
2014     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
2015     m_imageGenerator = 0;
2016 }
2017
2018 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const FloatRect& rect)
2019 {
2020     if (!m_imageGenerator)
2021         return 0;
2022
2023 #if !LOG_DISABLED
2024     double start = monotonicallyIncreasingTime();
2025 #endif
2026
2027     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
2028     RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), toCMTime(time), 0, 0));
2029     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawimage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
2030
2031 #if !LOG_DISABLED
2032     double duration = monotonicallyIncreasingTime() - start;
2033     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
2034 #endif
2035
2036     return image;
2037 }
2038
2039 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2040 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
2041 {
2042     if (!avAsset())
2043         return 0;
2044
2045     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
2046         return 0;
2047
2048     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
2049 }
2050 #endif
2051
2052 void AVFWrapper::updateVideoLayerGravity()
2053 {
2054     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
2055     // FIXME: <rdar://problem/14884340>
2056 }
2057
2058 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
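// Pending "skd" loading requests are keyed by their key URI so that, when a key for that URI later becomes
// available, the most recently queued request can be retrieved with takeRequestForKeyURI() and completed by
// whichever component obtained the key.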
2059 void AVFWrapper::setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest)
2060 {
2061     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2062     if (requestsIterator != m_keyURIToRequestMap.end()) {
2063         requestsIterator->value.append(avRequest);
2064         return;
2065     }
2066
2067     Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>> requests;
2068     requests.append(avRequest);
2069     m_keyURIToRequestMap.set(keyURI, requests);
2070 }
2071
2072 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
2073 {
2074     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2075     if (requestsIterator == m_keyURIToRequestMap.end())
2076         return RetainPtr<AVCFAssetResourceLoadingRequestRef>();
2077
2078     auto request = requestsIterator->value.takeLast();
2079     if (requestsIterator->value.isEmpty())
2080         m_keyURIToRequestMap.take(keyURI);
2081
2082     return request;
2083 }
2084 #endif
2085
2086 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
2087 {
2088     ASSERT(isMainThread());
2089     ASSERT(m_parent);
2090     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
2091
2092     CGRect bounds = wrapperLayer->bounds();
2093     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
2094     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 
2095
2096     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
2097     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
2098
2099     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2100 }
2101
2102 } // namespace WebCore
2103
2104 #else
2105 // AVFoundation should always be enabled for Apple production builds.
2106 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2107 #error AVFoundation is not enabled!
2108 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2109 #endif // USE(AVFOUNDATION)
2110 #endif // PLATFORM(WIN) && ENABLE(VIDEO)