[Win] Extend AVFoundationCF Media Implementation
[WebKit-https.git] / Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if PLATFORM(WIN) && ENABLE(VIDEO) 
29
30 #if USE(AVFOUNDATION)
31
32 #include "MediaPlayerPrivateAVFoundationCF.h"
33
34 #include "ApplicationCacheResource.h"
35 #include "CDMSessionAVFoundationCF.h"
36 #include "COMPtr.h"
37 #include "FloatConversion.h"
38 #include "FrameView.h"
39 #include "GraphicsContext.h"
40 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
41 #include "InbandTextTrackPrivateAVCF.h"
42 #else
43 #include "InbandTextTrackPrivateLegacyAVCF.h"
44 #endif
45 #include "MediaTimeAVFoundation.h"
46 #include "URL.h"
47 #include "Logging.h"
48 #include "PlatformCALayerWin.h"
49 #include "SoftLinking.h"
50 #include "TimeRanges.h"
51 #include "WebCoreAVCFResourceLoader.h"
52
53 #include <AVFoundationCF/AVCFPlayerItem.h>
54 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
55 #include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
56 #endif
57 #include <AVFoundationCF/AVCFPlayerLayer.h>
58 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) || HAVE(ENCRYPTED_MEDIA_V2)
59 #include <AVFoundationCF/AVCFAssetResourceLoader.h>
60 #endif
61 #include <AVFoundationCF/AVFoundationCF.h>
62 #include <CoreMedia/CoreMedia.h>
63 #include <d3d9.h>
64 #include <delayimp.h>
65 #include <dispatch/dispatch.h>
66 #if ENABLE(ENCRYPTED_MEDIA_V2)
67 #include <runtime/DataView.h>
68 #include <runtime/Uint16Array.h>
69 #endif
70 #include <wtf/HashMap.h>
71 #include <wtf/Threading.h>
72 #include <wtf/text/CString.h>
73 #include <wtf/text/StringView.h>
74 #include <wtf/StringPrintStream.h>
75
76 // The softlink header files must be included after the AVCF and CoreMedia header files.
77 #include "AVFoundationCFSoftLinking.h"
78 #include "CoreMediaSoftLinking.h"
79
80 // We don't bother softlinking against libdispatch since it's already been loaded by AAS.
81 #ifdef DEBUG_ALL
82 #pragma comment(lib, "libdispatch_debug.lib")
83 #else
84 #pragma comment(lib, "libdispatch.lib")
85 #endif
86
87 using namespace std;
88
89 namespace WebCore {
90
91 class LayerClient;
92
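// AVFWrapper owns the AVCF player, asset, player item, layer and observer objects for a
// MediaPlayerPrivateAVFoundationCF. AVCF delivers its callbacks on a background dispatch queue,
// so the static callback functions below look the wrapper up by object ID before touching it.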
93 class AVFWrapper {
94 public:
95     AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
96     ~AVFWrapper();
97
98     void scheduleDisconnectAndDelete();
99
100     void createAVCFVideoLayer();
101     void destroyVideoLayer();
102     PlatformLayer* platformLayer();
103
104     CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
105     PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
106     void setVideoLayerNeedsCommit();
107     void setVideoLayerHidden(bool);
108
109     void createImageGenerator();
110     void destroyImageGenerator();
111     RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const IntRect&);
112
113     void createAssetForURL(const String& url, bool inheritURI);
114     void setAsset(AVCFURLAssetRef);
115     
116     void createPlayer(IDirect3DDevice9*);
117     void createPlayerItem();
118     
119     void checkPlayability();
120     void beginLoadingMetadata();
121     
122     void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
123     void updateVideoLayerGravity();
124
125     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
126     InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }
127
128 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
129     static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
130     static void processCue(void* context);
131 #endif
132 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
133     static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
134 #endif
135     static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
136     static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
137     static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
138     static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
139     static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
140     static void processNotification(void* context);
141
142     inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
143     inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
144     inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
145     inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
146     inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
147     inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
148 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
149     inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
150     AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
151 #endif
152     inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }
153
154 #if ENABLE(ENCRYPTED_MEDIA_V2)
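    // Pending AVCF resource loading requests for key URIs, held in m_keyURIToRequestMap until a
    // CDM session retrieves them with takeRequestForKeyURI().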
155     RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
156     void setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest);
157 #endif
158
159 private:
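    // AVCF callbacks receive the wrapper's integer object ID (cast to void*) rather than a raw
    // 'this' pointer, so a callback that fires after the wrapper has been removed from map() can
    // be recognized by a failed avfWrapperForCallbackContext() lookup and ignored.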
160     inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }
161
162     static Mutex& mapLock();
163     static HashMap<uintptr_t, AVFWrapper*>& map();
164     static AVFWrapper* avfWrapperForCallbackContext(void*);
165     void addToMap();
166     void removeFromMap() const;
167 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
168     bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
169     static void processShouldWaitForLoadingOfResource(void* context);
170 #endif
171
172     static void disconnectAndDeleteAVFWrapper(void*);
173
174     static uintptr_t s_nextAVFWrapperObjectID;
175     uintptr_t m_objectID;
176
177     MediaPlayerPrivateAVFoundationCF* m_owner;
178
179     RetainPtr<AVCFPlayerRef> m_avPlayer;
180     RetainPtr<AVCFURLAssetRef> m_avAsset;
181     RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
182     RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
183     RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
184     RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
185 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
186     RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
187     RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
188 #endif
189
190     dispatch_queue_t m_notificationQueue;
191
192     mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
193     RefPtr<PlatformCALayer> m_videoLayerWrapper;
194
195     OwnPtr<LayerClient> m_layerClient;
196     COMPtr<IDirect3DDevice9Ex> m_d3dDevice;
197
198     InbandTextTrackPrivateAVF* m_currentTextTrack;
199
200 #if ENABLE(ENCRYPTED_MEDIA_V2)
201     HashMap<String, Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>>> m_keyURIToRequestMap;
202     AVCFAssetResourceLoaderCallbacks m_resourceLoaderCallbacks;
203 #endif
204 };
205
206 uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;
207
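// PlatformCALayerClient for the CACFLayer that wraps the AVCF video layer. It opts into layout
// callbacks (platformCALayerRespondsToLayoutChanges) but draws no content of its own.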
208 class LayerClient : public PlatformCALayerClient {
209 public:
210     LayerClient(AVFWrapper* parent) : m_parent(parent) { }
211     virtual ~LayerClient() { m_parent = 0; }
212
213 private:
214     virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
215     virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }
216
217     virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
218     virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
219     virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&) { }
220     virtual bool platformCALayerShowDebugBorders() const { return false; }
221     virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
222     virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }
223
224     virtual bool platformCALayerContentsOpaque() const { return false; }
225     virtual bool platformCALayerDrawsContent() const { return false; }
226     virtual float platformCALayerDeviceScaleFactor() const { return 1; }
227
228     AVFWrapper* m_parent;
229 };
230
231 #if !LOG_DISABLED
232 static const char* boolString(bool val)
233 {
234     return val ? "true" : "false";
235 }
236 #endif
237
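// Asset properties that must finish loading before assetStatus() reports anything beyond
// MediaPlayerAVAssetStatusLoading; the array is created once and cached by metadataKeyNames().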
238 static CFArrayRef createMetadataKeyNames()
239 {
240     static const CFStringRef keyNames[] = {
241         AVCFAssetPropertyDuration,
242         AVCFAssetPropertyNaturalSize,
243         AVCFAssetPropertyPreferredTransform,
244         AVCFAssetPropertyPreferredRate,
245         AVCFAssetPropertyPlayable,
246         AVCFAssetPropertyTracks,
247 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
248         AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
249 #endif
250     };
251     
252     return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
253 }
254
255 static CFArrayRef metadataKeyNames()
256 {
257     static CFArrayRef keys = createMetadataKeyNames();
258     return keys;
259 }
260
261 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
262 static CFStringRef CMTimeRangeStartKey()
263 {
264     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start")));
265     return key;
266 }
267
268 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
269 static CFStringRef CMTimeRangeDurationKey()
270 {
271     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration")));
272     return key;
273 }
274
275 // FIXME: It would be better if AVCF exported this notification name.
276 static CFStringRef CACFContextNeedsFlushNotification()
277 {
278     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification")));
279     return name;
280 }
281
282 // Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
283 // to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
284 inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
285 {
286     return wrapper ? wrapper->videoLayer() : 0; 
287 }
288
289 inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
290 {
291     return wrapper ? wrapper->avPlayer() : 0; 
292 }
293
294 inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
295 {
296     return wrapper ? wrapper->avAsset() : 0; 
297 }
298
299 inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
300 {
301     return wrapper ? wrapper->avPlayerItem() : 0; 
302 }
303
304 inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
305 {
306     return wrapper ? wrapper->imageGenerator() : 0; 
307 }
308
309 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
310 inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
311 {
312     return wrapper ? wrapper->legibleOutput() : 0;
313 }
314
315 inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
316 {
317     return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
318 }
319 #endif
320
321 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
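// A single serial dispatch queue for the WebCoreAVFLoaderDelegate work, created lazily and
// exactly once via dispatch_once_f().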
322 static dispatch_queue_t globalQueue = nullptr;
323
324 static void initGlobalLoaderDelegateQueue(void* ctx)
325 {
326     globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
327 }
328
329 static dispatch_queue_t globalLoaderDelegateQueue()
330 {
331     static dispatch_once_t onceToken;
332
333     dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);
334
335     return globalQueue;
336 }
337 #endif
338
339 PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationCF::create(MediaPlayer* player) 
340 {
341     return adoptPtr(new MediaPlayerPrivateAVFoundationCF(player));
342 }
343
344 void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
345 {
346     if (isAvailable())
347         registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
348 }
349
350 MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
351     : MediaPlayerPrivateAVFoundation(player)
352     , m_avfWrapper(0)
353     , m_videoFrameHasDrawn(false)
354 {
355     LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
356 }
357
358 MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
359 {
360     LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
361 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
362     for (auto& pair : m_resourceLoaderMap)
363         pair.value->invalidate();
364 #endif
365     cancelLoad();
366 }
367
368 void MediaPlayerPrivateAVFoundationCF::cancelLoad()
369 {
370     LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);
371
372     // Do nothing when our cancellation of pending loading calls its completion handler
373     setDelayCallbacks(true);
374     setIgnoreLoadStateChanges(true);
375
376     tearDownVideoRendering();
377
378     clearTextTracks();
379
380     if (m_avfWrapper) {
381         // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to 
382         // disconnectAndDeleteAVFWrapper on that queue. 
383         m_avfWrapper->scheduleDisconnectAndDelete();
384         m_avfWrapper = 0;
385     }
386
387     setIgnoreLoadStateChanges(false);
388     setDelayCallbacks(false);
389 }
390
391 void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
392 {
393     ASSERT(supportsAcceleratedRendering());
394
395     if (m_avfWrapper)
396         m_avfWrapper->updateVideoLayerGravity();
397 }
398
399 bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
400 {
401     return videoLayer(m_avfWrapper);
402 }
403
404 bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
405 {
406     return imageGenerator(m_avfWrapper);
407 }
408
409 void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
410 {
411     LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);
412     ASSERT(isMainThread());
413
414     if (imageGenerator(m_avfWrapper))
415         return;
416
417     if (m_avfWrapper)
418         m_avfWrapper->createImageGenerator();
419 }
420
421 void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
422 {
423     ASSERT(isMainThread());
424     if (m_avfWrapper)
425         m_avfWrapper->destroyImageGenerator();
426 }
427
428 void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
429 {
430     ASSERT(isMainThread());
431     ASSERT(supportsAcceleratedRendering());
432
433     if (m_avfWrapper)
434         m_avfWrapper->createAVCFVideoLayer();
435 }
436
437 void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
438 {
439     ASSERT(isMainThread());
440     LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
441     if (m_avfWrapper)
442         m_avfWrapper->destroyVideoLayer();
443 }
444
445 bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
446 {
447     return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
448 }
449
450 void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
451 {
452     if (m_avfWrapper)
453         m_avfWrapper->setCurrentTextTrack(track);
454 }
455
456 InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
457 {
458     if (m_avfWrapper)
459         return m_avfWrapper->currentTextTrack();
460
461     return 0;
462 }
463
464 void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
465 {
466     ASSERT(!m_avfWrapper);
467
468     setDelayCallbacks(true);
469
470     bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");
471
472     m_avfWrapper = new AVFWrapper(this);
473     m_avfWrapper->createAssetForURL(url, inheritURI);
474     setDelayCallbacks(false);
475 }
476
477 void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
478 {
479     ASSERT(isMainThread());
480     ASSERT(m_avfWrapper);
481     
482     setDelayCallbacks(true);
483     m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
484     setDelayCallbacks(false);
485 }
486
487 void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
488 {
489     ASSERT(isMainThread());
490     ASSERT(m_avfWrapper);
491     
492     setDelayCallbacks(true);
493     m_avfWrapper->createPlayerItem();
494
495     setDelayCallbacks(false);
496 }
497
498 void MediaPlayerPrivateAVFoundationCF::checkPlayability()
499 {
500     ASSERT(m_avfWrapper);
501     m_avfWrapper->checkPlayability();
502 }
503
504 void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
505 {
506     ASSERT(m_avfWrapper);
507     m_avfWrapper->beginLoadingMetadata();
508 }
509
510 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
511 {
512     if (!avPlayerItem(m_avfWrapper))
513         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
514
515     AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
516     if (status == AVCFPlayerItemStatusUnknown)
517         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
518     if (status == AVCFPlayerItemStatusFailed)
519         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
520     if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
521         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
522     if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
523         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
524     if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
525         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
526     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
527 }
528
529 PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
530 {
531     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
532     PlatformMedia pm;
533     pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
534     pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
535     return pm;
536 }
537
538 PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
539 {
540     ASSERT(isMainThread());
541     if (!m_avfWrapper)
542         return 0;
543
544     return m_avfWrapper->platformLayer();
545 }
546
547 void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
548 {
549     ASSERT(isMainThread());
550     if (!m_avfWrapper)
551         return;
552     
553     // FIXME: We use a CATransaction here on the Mac, we need to figure out why this was done there and
554     // whether we're affected by the same issue.
555     setDelayCallbacks(true);
556     m_avfWrapper->setVideoLayerHidden(!isVisible);    
557     if (!isVisible)
558         tearDownVideoRendering();
559     setDelayCallbacks(false);
560 }
561
562 void MediaPlayerPrivateAVFoundationCF::platformPlay()
563 {
564     LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
565     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
566         return;
567
568     setDelayCallbacks(true);
569     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
570     setDelayCallbacks(false);
571 }
572
573 void MediaPlayerPrivateAVFoundationCF::platformPause()
574 {
575     LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
576     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
577         return;
578
579     setDelayCallbacks(true);
580     AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
581     setDelayCallbacks(false);
582 }
583
584 MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
585 {
586     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
587         return MediaTime::zeroTime();
588
589     CMTime cmDuration;
590
591     // Check the AVPlayerItem if we have one and it has loaded its duration; some assets never report duration.
592     if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
593         cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
594     else
595         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
596
597     if (CMTIME_IS_NUMERIC(cmDuration))
598         return toMediaTime(cmDuration);
599
600     if (CMTIME_IS_INDEFINITE(cmDuration))
601         return MediaTime::positiveInfiniteTime();
602
603     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
604     return MediaTime::invalidTime();
605 }
606
607 MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
608 {
609     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
610         return MediaTime::zeroTime();
611
612     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
613     if (CMTIME_IS_NUMERIC(itemTime))
614         return max(toMediaTime(itemTime), MediaTime::zeroTime());
615
616     return MediaTime::zeroTime();
617 }
618
619 void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
620 {
621     if (!m_avfWrapper)
622         return;
623     
624     // seekToTime generates several event callbacks, update afterwards.
625     setDelayCallbacks(true);
626     m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
627     setDelayCallbacks(false);
628 }
629
630 void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
631 {
632     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
633         return;
634
635     AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
636 }
637
638 void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
639 {
640     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
641         return;
642
643     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
644     AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
645 }
646
647 void MediaPlayerPrivateAVFoundationCF::updateRate()
648 {
649     LOG(Media, "MediaPlayerPrivateAVFoundationCF::updateRate(%p)", this);
650     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
651         return;
652
653     setDelayCallbacks(true);
654     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
655     setDelayCallbacks(false);
656 }
657
658 float MediaPlayerPrivateAVFoundationCF::rate() const
659 {
660     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
661         return 0;
662
663     setDelayCallbacks(true);
664     float currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
665     setDelayCallbacks(false);
666
667     return currentRate;
668 }
669
670 static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
671 {
672     // Is the range valid?
673     if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
674         return false;
675
676     if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
677         return false;
678
679     return true;
680 }
681
682 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
683 {
684     auto timeRanges = PlatformTimeRanges::create();
685
686     if (!avPlayerItem(m_avfWrapper))
687         return timeRanges;
688
689     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
690     if (!loadedRanges)
691         return timeRanges;
692
693     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
694     for (CFIndex i = 0; i < rangeCount; i++) {
695         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
696         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
697         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
698         
699         if (timeRangeIsValidAndNotEmpty(start, duration)) {
700             MediaTime rangeStart = toMediaTime(start);
701             MediaTime rangeEnd = rangeStart + toMediaTime(duration);
702             timeRanges->add(rangeStart, rangeEnd);
703         }
704     }
705
706     return timeRanges;
707 }
708
709 MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
710 {
711     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
712     if (!seekableRanges) 
713         return MediaTime::zeroTime(); 
714
715     MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
716     bool hasValidRange = false; 
717     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
718     for (CFIndex i = 0; i < rangeCount; i++) {
719         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
720         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
721         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
722         if (!timeRangeIsValidAndNotEmpty(start, duration))
723             continue;
724
725         hasValidRange = true; 
726         MediaTime startOfRange = toMediaTime(start); 
727         if (minTimeSeekable > startOfRange) 
728             minTimeSeekable = startOfRange; 
729     } 
730     return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
731 }
732
733 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
734 {
735     if (!avPlayerItem(m_avfWrapper))
736         return MediaTime::zeroTime();
737
738     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
739     if (!seekableRanges)
740         return MediaTime::zeroTime();
741
742     MediaTime maxTimeSeekable;
743     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
744     for (CFIndex i = 0; i < rangeCount; i++) {
745         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
746         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
747         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
748         if (!timeRangeIsValidAndNotEmpty(start, duration))
749             continue;
750         
751         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
752         if (maxTimeSeekable < endOfRange)
753             maxTimeSeekable = endOfRange;
754     }
755
756     return maxTimeSeekable;   
757 }
758
759 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
760 {
761     if (!avPlayerItem(m_avfWrapper))
762         return MediaTime::zeroTime();
763
764     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
765     if (!loadedRanges)
766         return MediaTime::zeroTime();
767
768     MediaTime maxTimeLoaded;
769     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
770     for (CFIndex i = 0; i < rangeCount; i++) {
771         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
772         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
773         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
774         if (!timeRangeIsValidAndNotEmpty(start, duration))
775             continue;
776         
777         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
778         if (maxTimeLoaded < endOfRange)
779             maxTimeLoaded = endOfRange;
780     }
781
782     return maxTimeLoaded;   
783 }
784
785 unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
786 {
787     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
788         return 0;
789
790     int64_t totalMediaSize = 0;
791     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
792     CFIndex trackCount = CFArrayGetCount(tracks.get());
793     for (CFIndex i = 0; i < trackCount; i++) {
794         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
795         totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
796     }
797
798     return static_cast<unsigned long long>(totalMediaSize);
799 }
800
801 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
802 {
803     if (!avAsset(m_avfWrapper))
804         return MediaPlayerAVAssetStatusDoesNotExist;
805
806     // First, make sure all metadata properties we rely on are loaded.
807     CFArrayRef keys = metadataKeyNames();
808     CFIndex keyCount = CFArrayGetCount(keys);
809     for (CFIndex i = 0; i < keyCount; i++) {
810         CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
811         AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);
812
813         if (keyStatus < AVCFPropertyValueStatusLoaded)
814             return MediaPlayerAVAssetStatusLoading;
815         if (keyStatus == AVCFPropertyValueStatusFailed) {
816             if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
817                 // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
818                 // <rdar://problem/15966685>
819                 continue;
820             }
821 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
822             if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
823                 // On Windows, the media selection options are not available when initially interacting with a streaming source.
824                 // <rdar://problem/16160699>
825                 continue;
826             }
827 #endif
828             return MediaPlayerAVAssetStatusFailed;
829         }
830         if (keyStatus == AVCFPropertyValueStatusCancelled)
831             return MediaPlayerAVAssetStatusCancelled;
832     }
833
834     if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
835         return MediaPlayerAVAssetStatusPlayable;
836
837     return MediaPlayerAVAssetStatusLoaded;
838 }
839
840 void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
841 {
842     ASSERT(isMainThread());
843     if (!metaDataAvailable() || context->paintingDisabled())
844         return;
845
846     if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
847         // We're being told to render into a context, but we already have the
848         // video layer, which probably means we've been called from <canvas>.
849         createContextVideoRenderer();
850     }
851
852     paint(context, rect);
853 }
854
855 void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const IntRect& rect)
856 {
857     ASSERT(isMainThread());
858     if (!metaDataAvailable() || context->paintingDisabled() || !imageGenerator(m_avfWrapper))
859         return;
860
861     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
862
863     setDelayCallbacks(true);
864     RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
865     if (image) {
866         context->save();
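        // CGContextDrawImage() draws with a bottom-left origin, so flip the destination rect
        // vertically to match WebCore's top-left coordinate space.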
867         context->translate(rect.x(), rect.y() + rect.height());
868         context->scale(FloatSize(1.0f, -1.0f));
869         context->setImageInterpolationQuality(InterpolationLow);
870         IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
871         CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
872         context->restore();
873         image = 0;
874     }
875     setDelayCallbacks(false);
876     
877     m_videoFrameHasDrawn = true;
878 }
879
880 static HashSet<String> mimeTypeCache()
881 {
882     DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
883     static bool typeListInitialized = false;
884
885     if (typeListInitialized)
886         return cache;
887     typeListInitialized = true;
888
889     RetainPtr<CFArrayRef> supportedTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
890     
891     ASSERT(supportedTypes);
892     if (!supportedTypes)
893         return cache;
894
895     CFIndex typeCount = CFArrayGetCount(supportedTypes.get());
896     for (CFIndex i = 0; i < typeCount; i++)
897         cache.add(static_cast<CFStringRef>(CFArrayGetValueAtIndex(supportedTypes.get(), i)));
898
899     return cache;
900 }
901
902 void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String>& supportedTypes)
903 {
904     supportedTypes = mimeTypeCache();
905 }
906
907 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
908 static bool keySystemIsSupported(const String& keySystem)
909 {
910     if (equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0"))
911         return true;
912     return false;
913 }
914 #endif
915
916 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
917 {
918     // Only return "IsSupported" if there is no codecs parameter for now as there is no way to ask if it supports an
919     // extended MIME type until rdar://8721715 is fixed.
920     if (mimeTypeCache().contains(parameters.type))
921         return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
922
923     return MediaPlayer::IsNotSupported;
924 }
925
926 bool MediaPlayerPrivateAVFoundationCF::supportsKeySystem(const String& keySystem, const String& mimeType)
927 {
928 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
929     if (keySystem.isEmpty())
930         return false;
931
932     if (!keySystemIsSupported(keySystem))
933         return false;
934
935     if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
936         return false;
937
938     return true;
939 #else
940     UNUSED_PARAM(keySystem);
941     UNUSED_PARAM(mimeType);
942     return false;
943 #endif
944 }
945
946 bool MediaPlayerPrivateAVFoundationCF::isAvailable()
947 {
948     return AVFoundationCFLibrary() && CoreMediaLibrary();
949 }
950
951 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
952 void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
953 {
954     WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);
955
956     if (resourceLoader)
957         resourceLoader->stopLoading();
958 }
959
960 void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
961 {
962     m_resourceLoaderMap.remove(avRequest);
963 }
964 #endif
965
966 MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
967 {
968     if (!metaDataAvailable())
969         return timeValue;
970
971     // FIXME - can not implement until rdar://8721669 is fixed.
972     return timeValue;
973 }
974
975 void MediaPlayerPrivateAVFoundationCF::tracksChanged()
976 {
977     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
978     m_languageOfPrimaryAudioTrack = String();
979
980     if (!avAsset(m_avfWrapper))
981         return;
982
983     setDelayCharacteristicsChangedNotification(true);
984
985     bool haveCCTrack = false;
986     bool hasCaptions = false;
987
988     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
989     // asked about those fairly frequently.
990     if (!avPlayerItem(m_avfWrapper)) {
991         // We don't have a player item yet, so check with the asset because some assets support inspection
992         // prior to becoming ready to play.
993         RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
994         setHasVideo(CFArrayGetCount(visualTracks.get()));
995
996         RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
997         setHasAudio(CFArrayGetCount(audioTracks.get()));
998
999 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1000         RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
1001         hasCaptions = CFArrayGetCount(captionTracks.get());
1002 #endif
1003     } else {
1004         bool hasVideo = false;
1005         bool hasAudio = false;
1006
1007         RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1008
1009         CFIndex trackCount = CFArrayGetCount(tracks.get());
1010         for (CFIndex i = 0; i < trackCount; i++) {
1011             AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1012             
1013             if (AVCFPlayerItemTrackIsEnabled(track)) {
1014                 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
1015                 if (!assetTrack) {
1016                     // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1017                     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %d is enabled, but has no asset track.", this, track);
1018                     continue;
1019                 }
1020                 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1021                 if (!mediaType)
1022                     continue;
1023                 
1024                 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1025                     hasVideo = true;
1026                 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1027                     hasAudio = true;
1028                 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
1029 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1030                     hasCaptions = true;
1031 #endif
1032                     haveCCTrack = true;
1033                 }
1034             }
1035         }
1036
1037         setHasVideo(hasVideo);
1038         setHasAudio(hasAudio);
1039     }
1040
1041 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1042     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1043     if (legibleGroup) {
1044         RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1045         hasCaptions = CFArrayGetCount(playableOptions.get());
1046         if (hasCaptions)
1047             processMediaSelectionOptions();
1048     }
1049 #endif
1050
1051 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1052     if (haveCCTrack)
1053         processLegacyClosedCaptionsTracks();
1054 #endif
1055
1056     setHasClosedCaptions(hasCaptions);
1057
1058     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 
1059         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1060
1061     sizeChanged();
1062
1063     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1064         characteristicsChanged();
1065
1066     setDelayCharacteristicsChangedNotification(false);
1067 }
1068
1069 void MediaPlayerPrivateAVFoundationCF::sizeChanged()
1070 {
1071     ASSERT(isMainThread());
1072     if (!avAsset(m_avfWrapper))
1073         return;
1074     
1075     // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
1076     // the union of all visual track rects.
1077     CGRect trackRectUnion = CGRectZero;
1078     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1079     CFIndex trackCount = CFArrayGetCount(tracks.get());
1080     for (CFIndex i = 0; i < trackCount; i++) {
1081         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1082         
1083         CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack);
1084         CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
1085         trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack)));
1086     }
1087     // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
1088     trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y);
1089     CGSize naturalSize = trackRectUnion.size;
1090
1091     if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper))
1092         naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper));
1093
1094     // Also look at the asset's preferred transform so we account for a movie matrix.
1095     CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper)));
1096     if (movieSize.width > naturalSize.width)
1097         naturalSize.width = movieSize.width;
1098     if (movieSize.height > naturalSize.height)
1099         naturalSize.height = movieSize.height;
1100     setNaturalSize(IntSize(naturalSize));
1101 }
1102
1103 bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const
1104 {
1105     // The AVFoundationCF player needs to have the root compositor available at construction time
1106     // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode.
1107     //
1108     // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode
1109     // when asked, then we could follow AVFoundation's model and switch to compositing
1110     // mode when beginning to play media.
1111     return true;
1112 }
1113
1114 #if ENABLE(ENCRYPTED_MEDIA_V2)
1115 RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI)
1116 {
1117     if (!m_avfWrapper)
1118         return nullptr;
1119
1120     return m_avfWrapper->takeRequestForKeyURI(keyURI);
1121 }
1122
1123 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem)
1124 {
1125     if (!keySystemIsSupported(keySystem))
1126         return nullptr;
1127
1128     return std::make_unique<CDMSessionAVFoundationCF>(this);
1129 }
1130 #endif
1131
1132 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1133 void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks()
1134 {
1135 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1136     AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper));
1137 #endif
1138
1139     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1140     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1141     CFIndex trackCount = CFArrayGetCount(tracks.get());
1142     for (CFIndex i = 0; i < trackCount; ++i) {
1143         AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1144
1145         RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack));
1146         if (!assetTrack) {
1147             // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1148             LOG(Media, "MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks(%p) - track %p is enabled, but has no asset track.", this, playerItemTrack);
1149             continue;
1150         }
1151         CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1152         if (!mediaType)
1153             continue;
1154                 
1155         if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo)
1156             continue;
1157
1158         bool newCCTrack = true;
1159         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1160             if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
1161                 continue;
1162
1163             RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get());
1164             if (track->avPlayerItemTrack() == playerItemTrack) {
1165                 removedTextTracks.remove(i - 1);
1166                 newCCTrack = false;
1167                 break;
1168             }
1169         }
1170
1171         if (!newCCTrack)
1172             continue;
1173         
1174         m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack));
1175     }
1176
1177     processNewAndRemovedTextTracks(removedTextTracks);
1178 }
1179 #endif
1180
1181 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1182 void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions()
1183 {
1184     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1185     if (!legibleGroup) {
1186         LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
1187         return;
1188     }
1189
1190     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
1191     // but set the selected legible track to nil so text tracks will not be automatically configured.
1192     if (!m_textTracks.size() && AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)) {
1193         if (AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper))
1194             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup);
1195     }
1196
1197     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1198     RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1199     CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get());
1200     for (CFIndex i = 0; i < legibleOptionsCount; ++i) {
1201         AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i));
1202         bool newTrack = true;
1203         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1204             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1205                 continue;
1206
1207             RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get());
1208             if (CFEqual(track->mediaSelectionOption(), option)) {
1209                 removedTextTracks.remove(i - 1);
1210                 newTrack = false;
1211                 break;
1212             }
1213         }
1214         if (!newTrack)
1215             continue;
1216
1217         m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic));
1218     }
1219
1220     processNewAndRemovedTextTracks(removedTextTracks);
1221 }
1222
1223 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1224
1225 void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
1226 {
1227     if (m_currentTextTrack == track)
1228         return;
1229
1230     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
1231         
1232     m_currentTextTrack = track;
1233
1234     if (track) {
1235         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1236             AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE);
1237 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1238         else
1239             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia());
1240 #endif
1241     } else {
1242 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1243         AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia());
1244 #endif
1245         AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE);
1246     }
1247 }
1248
1249 String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const
1250 {
1251     if (!m_languageOfPrimaryAudioTrack.isNull())
1252         return m_languageOfPrimaryAudioTrack;
1253
1254     if (!avPlayerItem(m_avfWrapper))
1255         return emptyString();
1256
1257 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1258     // If AVFoundation has an audible group, return the language of the currently selected audible option.
1259     AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible);
1260     AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup);
1261     if (currentlySelectedAudibleOption) {
1262         RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption));
1263         m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get());
1264         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1265
1266         return m_languageOfPrimaryAudioTrack;
1267     }
1268 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1269
1270     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
1271     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
1272     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio));
1273     CFIndex trackCount = CFArrayGetCount(tracks.get());
1274     if (!tracks || trackCount != 1) {
1275         m_languageOfPrimaryAudioTrack = emptyString();
1276         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? trackCount : 0));
1277         return m_languageOfPrimaryAudioTrack;
1278     }
1279
1280     AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0);
1281     RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track));
1282
1283     // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full
1284     // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the
1285     // ISO 639-2/T language code so check it.
1286     if (!language)
1287         language = adoptCF(AVCFAssetTrackCopyLanguageCode(track));
1288
1289     // Some legacy tracks have "und" as a language, treat that the same as no language at all.
1290     if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) {
1291         m_languageOfPrimaryAudioTrack = language.get();
1292         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1293         return m_languageOfPrimaryAudioTrack;
1294     }
1295
1296     LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
1297     m_languageOfPrimaryAudioTrack = emptyString();
1298     return m_languageOfPrimaryAudioTrack;
1299 }
1300
1301 void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay()
1302 {
1303     if (m_avfWrapper)
1304         m_avfWrapper->setVideoLayerNeedsCommit();
1305 }
1306
1307 AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner)
1308     : m_owner(owner)
1309     , m_objectID(s_nextAVFWrapperObjectID++)
1310     , m_currentTextTrack(0)
1311 {
1312     ASSERT(isMainThread());
1313     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1314     LOG(Media, "AVFWrapper::AVFWrapper(%p)", this);
1315
1316     m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0);
1317
1318 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1319     m_resourceLoaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1;
1320     m_resourceLoaderCallbacks.context = nullptr;
1321     m_resourceLoaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource;
1322 #endif
1323
1324     addToMap();
1325 }
1326
1327 AVFWrapper::~AVFWrapper()
1328 {
1329     ASSERT(isMainThread());
1330     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1331     LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);
1332
1333     destroyVideoLayer();
1334     destroyImageGenerator();
1335
1336     if (m_notificationQueue)
1337         dispatch_release(m_notificationQueue);
1338
1339     if (avAsset()) {
1340         AVCFAssetCancelLoading(avAsset());
1341         m_avAsset = 0;
1342     }
1343
1344 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1345     if (legibleOutput()) {
1346         if (avPlayerItem())
1347             AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
1348         m_legibleOutput = 0;
1349     }
1350 #endif
1351
1352     m_avPlayerItem = 0;
1353     m_timeObserver = 0;
1354     m_avPlayer = 0;
1355 }
1356
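// AVCF invokes our callbacks on background dispatch queues and only hands back an opaque context pointer.
// Instead of passing the AVFWrapper pointer itself, each wrapper registers a small non-zero object ID in this
// global, lock-protected map and uses the ID as its callback context; callbacks look it up through
// avfWrapperForCallbackContext() and bail out if the wrapper has already been removed, so callbacks that
// arrive after deletion are ignored safely.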
1357 Mutex& AVFWrapper::mapLock()
1358 {
1359     static Mutex mapLock;
1360     return mapLock;
1361 }
1362
1363 HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
1364 {
1365     static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>;
1366     return map;
1367 }
1368
1369 void AVFWrapper::addToMap()
1370 {
1371     MutexLocker locker(mapLock());
1372     
1373     // HashMap doesn't allow a key of 0, and we must also make sure we aren't
1374     // reusing an object ID that's already in the map.
1375     while (!m_objectID || (map().find(m_objectID) != map().end()))
1376         m_objectID = s_nextAVFWrapperObjectID++;
1377        
1378     LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);
1379
1380     map().add(m_objectID, this);
1381 }
1382
1383 void AVFWrapper::removeFromMap() const
1384 {
1385     LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);
1386
1387     MutexLocker locker(mapLock());
1388     map().remove(m_objectID);
1389 }
1390
1391 AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
1392 {
1393     // Assumes caller has locked mapLock().
1394     HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
1395     if (it == map().end())
1396         return 0;
1397
1398     return it->value;
1399 }
1400
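// Teardown happens in three steps: remove the wrapper from the callback map so late callbacks are ignored,
// disconnect the AVCF observers on the wrapper's dispatch queue, then delete the wrapper back on the main
// thread, because the AVCFPlayer and related objects must be released on the thread that created them.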
1401 void AVFWrapper::scheduleDisconnectAndDelete()
1402 {
1403     // Ignore any subsequent notifications we might receive in notificationCallback().
1404     removeFromMap();
1405
1406     dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
1407 }
1408
1409 static void destroyAVFWrapper(void* context)
1410 {
1411     ASSERT(isMainThread());
1412     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1413     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1414     if (!avfWrapper)
1415         return;
1416
1417     delete avfWrapper;
1418 }
1419
1420 void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
1421 {
1422     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1423
1424     LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);
1425
1426     if (avfWrapper->avPlayerItem()) {
1427         CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1428         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
1429         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
1430         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
1431         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1432         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1433         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
1434         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
1435         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
1436         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
1437         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
1438         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
1439     }
1440
1441     if (avfWrapper->avPlayer()) {
1442         if (avfWrapper->timeObserver())
1443             AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());
1444
1445         CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
1446     }
1447
1448 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1449     if (avfWrapper->avPlayerItem() && avfWrapper->legibleOutput())
             AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
1450 #endif
1451
1452     // We must release the AVCFPlayer and other items on the same thread that created them.
1453     dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
1454 }
1455
1456 void AVFWrapper::createAssetForURL(const String& url, bool inheritURI)
1457 {
1458     ASSERT(!avAsset());
1459
1460     RetainPtr<CFURLRef> urlRef = URL(ParsedURLString, url).createCFURL();
1461
1462     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1463
1464     if (inheritURI)
1465         CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);
1466
1467     m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue));
1468
1469 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1470     ASSERT(callbackContext());
1471     m_resourceLoaderCallbacks.context = callbackContext();
1472
1473     AVCFAssetResourceLoaderRef resourceLoader = AVCFURLAssetGetResourceLoader(m_avAsset.get());
1474     AVCFAssetResourceLoaderSetCallbacks(resourceLoader, &m_resourceLoaderCallbacks, globalLoaderDelegateQueue());
1475 #endif
1476 }
1477
1478 void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
1479 {
1480     ASSERT(isMainThread());
1481     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1482     ASSERT(avPlayerItem());
1483
1484     if (avPlayer())
1485         return;
1486
1487     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1488
1489     if (d3dDevice) {
1490         // QI for an IDirect3DDevice9Ex interface; it is required for hardware video decoding.
1491         COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
1492         m_d3dDevice = d3dEx;
1493     } else
1494         m_d3dDevice = 0;
1495
1496     if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
1497         CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);
1498
1499 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1500     CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
1501 #endif
1502
1503     // FIXME: We need a way to create a AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
1504     AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
1505     m_avPlayer = adoptCF(playerRef);
1506 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1507     AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
1508 #endif
1509
1510     if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
1511         AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());
1512
1513     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1514     ASSERT(center);
1515
1516     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1517
1518     // Add a time observer and ask to be called very infrequently: we don't really want periodic callbacks,
1519     // but the observer is also invoked whenever a seek happens, which is the event we care about.
1520     const double veryLongInterval = 60*60*60*24*30;
1521     m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
1522 }
1523
1524 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1525 static RetainPtr<CFArrayRef> createLegibleOutputSubtypes()
1526 {
1527     int webVTTInt = 'wvtt'; // kCMSubtitleFormatType_WebVTT;
1528     RetainPtr<CFNumberRef> webVTTNumber = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &webVTTInt));
1529     CFTypeRef formatTypes[] = { webVTTNumber.get() };
1530     return adoptCF(CFArrayCreate(0, formatTypes, WTF_ARRAY_LENGTH(formatTypes), &kCFTypeArrayCallBacks));
1531 }
1532 #endif
1533
1534 void AVFWrapper::createPlayerItem()
1535 {
1536     ASSERT(isMainThread());
1537     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1538     ASSERT(avAsset());
1539
1540     if (avPlayerItem())
1541         return;
1542
1543     // Create the player item so we begin loading media data.
1544     AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
1545     m_avPlayerItem = adoptCF(itemRef);
1546
1547     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1548     ASSERT(center);
1549
1550     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1551     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1552     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1553     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1554     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1555     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1556     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1557     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1558     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1559     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1560     // FIXME: Are there other legible-output notifications we need to register for? asset and hasEnabledAudio are not exposed by AVCF.
1561
1562     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);
1563
1564 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1565     const CFTimeInterval legibleOutputAdvanceInterval = 2;
1566
1567     m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, createLegibleOutputSubtypes().get()));
1568     AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);
1569
1570     AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
1571     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
1572     ASSERT(callbackContext());
1573     callbackInfo.context = callbackContext();
1574     callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;
1575
1576     AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
1577     AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
1578     AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
1579     AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
1580 #endif
1581 }
1582
1583 void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
1584 {
1585     MutexLocker locker(mapLock());
1586     AVFWrapper* self = avfWrapperForCallbackContext(context);
1587     if (!self) {
1588         LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1589         return;
1590     }
1591
1592     double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported.
1593     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
1594 }
1595
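// Player and player item notifications can fire off the main thread. notificationCallback() packages the
// notification name and callback context into a heap-allocated NotificationCallbackData and bounces it to the
// main thread; processNotification() takes ownership there and forwards the corresponding
// MediaPlayerPrivateAVFoundation notification to the owner.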
1596 struct NotificationCallbackData {
1597     RetainPtr<CFStringRef> m_propertyName;
1598     void* m_context;
1599
1600     NotificationCallbackData(CFStringRef propertyName, void* context)
1601         : m_propertyName(propertyName), m_context(context)
1602     {
1603     }
1604 };
1605
1606 void AVFWrapper::processNotification(void* context)
1607 {
1608     ASSERT(isMainThread());
1609     ASSERT(context);
1610
1611     if (!context)
1612         return;
1613
1614     OwnPtr<NotificationCallbackData> notificationData = adoptPtr(reinterpret_cast<NotificationCallbackData*>(context));
1615
1616     MutexLocker locker(mapLock());
1617     AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
1618     if (!self) {
1619         LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1620         return;
1621     }
1622
1623     CFStringRef propertyName = notificationData->m_propertyName.get();
1624
1625     if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
1626         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1627     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1628         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1629     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
1630         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1631         if (asset)
1632             self->setAsset(asset);
1633         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1634     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1635         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1636     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1637         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1638     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1639         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1640     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1641         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1642     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1643         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1644     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1645         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1646     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1647         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1648     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1649         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1650     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1651         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1652     else
1653         ASSERT_NOT_REACHED();
1654 }
1655
1656 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1657 {
1658 #if !LOG_DISABLED
1659     char notificationName[256];
1660     CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
1661     LOG(Media, "AVFWrapper::notificationCallback(id=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1662 #endif
1663
1664     OwnPtr<NotificationCallbackData> notificationData = adoptPtr(new NotificationCallbackData(propertyName, observer));
1665
1666     dispatch_async_f(dispatch_get_main_queue(), notificationData.leakPtr(), processNotification);
1667 }
1668
1669 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1670 {
1671     MutexLocker locker(mapLock());
1672     AVFWrapper* self = avfWrapperForCallbackContext(context);
1673     if (!self) {
1674         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1675         return;
1676     }
1677
1678     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1679     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1680 }
1681
1682 void AVFWrapper::checkPlayability()
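// Kick off asynchronous loading of the asset's "playable" property; loadPlayableCompletionCallback() above
// reports AssetPlayabilityKnown to the owner once the value is available.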
1683 {
1684     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1685
1686     static CFArrayRef propertyKeyName;
1687     if (!propertyKeyName) {
1688         static const CFStringRef keyNames[] = { 
1689             AVCFAssetPropertyPlayable
1690         };
1691         propertyKeyName = CFArrayCreate(0, (const void**)keyNames, WTF_ARRAY_LENGTH(keyNames), &kCFTypeArrayCallBacks);
1692     }
1693
1694     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1695 }
1696
1697 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1698 {
1699     MutexLocker locker(mapLock());
1700     AVFWrapper* self = avfWrapperForCallbackContext(context);
1701     if (!self) {
1702         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1703         return;
1704     }
1705
1706     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1707     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1708 }
1709
1710 void AVFWrapper::beginLoadingMetadata()
1711 {
1712     ASSERT(avAsset());
1713     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1714     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1715 }
1716
1717 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1718 {
1719     MutexLocker locker(mapLock());
1720     AVFWrapper* self = avfWrapperForCallbackContext(context);
1721     if (!self) {
1722         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1723         return;
1724     }
1725
1726     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1727     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1728 }
1729
1730 void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
1731 {
1732     ASSERT(avPlayerItem());
1733     CMTime cmTime = toCMTime(time);
1734     CMTime cmBefore = toCMTime(negativeTolerance);
1735     CMTime cmAfter = toCMTime(positiveTolerance);
1736     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1737 }
1738
1739 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
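// In-band caption handling uses the same bounce-to-main-thread pattern as notifications: legibleOutputCallback()
// runs off the main thread, wraps the attributed strings, native sample buffers and item time in a
// LegibleOutputData, and processCue() hands them to the current text track on the main thread.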
1740 struct LegibleOutputData {
1741     RetainPtr<CFArrayRef> m_attributedStrings;
1742     RetainPtr<CFArrayRef> m_samples;
1743     MediaTime m_time;
1744     void* m_context;
1745
1746     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
1747         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1748     {
1749     }
1750 };
1751
1752 void AVFWrapper::processCue(void* context)
1753 {
1754     ASSERT(isMainThread());
1755     ASSERT(context);
1756
1757     if (!context)
1758         return;
1759
1760     std::unique_ptr<LegibleOutputData> legibleOutputData(reinterpret_cast<LegibleOutputData*>(context));
1761
1762     MutexLocker locker(mapLock());
1763     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1764     if (!self) {
1765         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1766         return;
1767     }
1768
1769     if (!self->m_currentTextTrack)
1770         return;
1771
1772     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1773 }
1774
1775 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1776 {
1777     ASSERT(!isMainThread());
1778     MutexLocker locker(mapLock());
1779     AVFWrapper* self = avfWrapperForCallbackContext(context);
1780     if (!self) {
1781         LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1782         return;
1783     }
1784
1785     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1786
1787     ASSERT(legibleOutput == self->m_legibleOutput);
1788
1789     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, toMediaTime(itemTime), context);
1790
1791     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1792 }
1793 #endif
1794
1795 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
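// Resource loading requests arrive on the loader delegate queue, not the main thread. Each request is forwarded
// to the main thread; if the wrapper has been deleted, or shouldWaitForLoadingOfResource() declines the request,
// the loading request is failed with a generic CFNetwork error.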
1796 struct LoadRequestData {
1797     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1798     void* m_context;
1799
1800     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1801         : m_request(request), m_context(context)
1802     {
1803     }
1804 };
1805
1806 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1807 {
1808     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1809     ASSERT(context);
1810
1811     if (!context)
1812         return;
1813
1814     std::unique_ptr<LoadRequestData> loadRequestData(reinterpret_cast<LoadRequestData*>(context));
1815
1816     MutexLocker locker(mapLock());
1817     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1818     if (!self) {
1819         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1820         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1821         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1822         return;
1823     }
1824
1825     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get())) {
1826         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1827         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1828     }
1829 }
1830
1831 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1832 {
1833 #if ENABLE(ENCRYPTED_MEDIA_V2)
1834     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1835     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1836     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1837     String scheme = schemeRef.get();
1838
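    // An "skd" URL is a key request rather than a normal network load: build an initData blob from the key URI
    // and ask the player whether a key is needed; if so, remember the request via setRequestForKey().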
1839     if (scheme == "skd") {
1840         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1841         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1842         String keyURI = keyURIRef.get();
1843
1844         // Create an initData with the following layout:
1845         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1846         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1847         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1848         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
1849         initDataView->set<uint32_t>(0, keyURISize, true);
1850
1851         RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
1852         keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length(), 0);
1853
1854         RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
1855         if (!m_owner->player()->keyNeeded(initData.get()))
1856             return false;
1857
1858         setRequestForKey(keyURI, avRequest);
1859         return true;
1860     }
1861 #endif
1862
1863     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1864     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1865     resourceLoader->startLoading();
1866     return true;
1867 }
1868
1869 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1870 {
1871     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1872     MutexLocker locker(mapLock());
1873     AVFWrapper* self = avfWrapperForCallbackContext(context);
1874     if (!self) {
1875         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1876         return false;
1877     }
1878
1879     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1880
1881     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1882
1883     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1884
1885     return true;
1886 }
1887 #endif
1888
1889 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1890 {
1891     if (asset == avAsset())
1892         return;
1893
1894     AVCFAssetCancelLoading(avAsset());
1895     m_avAsset = asset;
1896 }
1897
1898 PlatformLayer* AVFWrapper::platformLayer()
1899 {
1900     ASSERT(isMainThread());
1901     if (m_videoLayerWrapper)
1902         return m_videoLayerWrapper->platformLayer();
1903
1904     if (!videoLayer())
1905         return 0;
1906
1907     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1908     m_layerClient = adoptPtr(new LayerClient(this));
1909     if (!m_layerClient)
1910         return 0;
1911
1912     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1913     if (!m_videoLayerWrapper)
1914         return 0;
1915
1916     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1917
1918     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1919     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1920     m_videoLayerWrapper->setNeedsLayout();
1921     updateVideoLayerGravity();
1922
1923     return m_videoLayerWrapper->platformLayer();
1924 }
1925
1926 void AVFWrapper::createAVCFVideoLayer()
1927 {
1928     ASSERT(isMainThread());
1929     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1930     if (!avPlayer() || m_avCFVideoLayer)
1931         return;
1932
1933     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1934     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1935     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1936 }
1937
1938 void AVFWrapper::destroyVideoLayer()
1939 {
1940     ASSERT(isMainThread());
1941     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1942     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1943     m_layerClient = nullptr;
1944     m_caVideoLayer = 0;
1945     m_videoLayerWrapper = 0;
1946     if (!m_avCFVideoLayer.get())
1947         return;
1948
1949     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), 0);
1950     m_avCFVideoLayer = 0;
1951 }
1952
1953 void AVFWrapper::setVideoLayerNeedsCommit()
1954 {
1955     if (m_videoLayerWrapper)
1956         m_videoLayerWrapper->setNeedsCommit();
1957 }
1958
1959 void AVFWrapper::setVideoLayerHidden(bool value)
1960 {
1961     if (m_videoLayerWrapper)
1962         m_videoLayerWrapper->setHidden(value);
1963 }
1964
1965 void AVFWrapper::createImageGenerator()
1966 {
1967     ASSERT(isMainThread());
1968     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1969     if (!avAsset() || m_imageGenerator)
1970         return;
1971
1972     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
1973
1974     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
1975     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
1976     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
1977     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
1978
1979     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
1980 }
1981
1982 void AVFWrapper::destroyImageGenerator()
1983 {
1984     ASSERT(isMainThread());
1985     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1986     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
1987     m_imageGenerator = 0;
1988 }
1989
1990 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const IntRect& rect)
1991 {
1992     if (!m_imageGenerator)
1993         return 0;
1994
1995 #if !LOG_DISABLED
1996     double start = monotonicallyIncreasingTime();
1997 #endif
1998
1999     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
2000     RetainPtr<CGImageRef> rawImage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), toCMTime(time), 0, 0));
2001     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
2002
2003 #if !LOG_DISABLED
2004     double duration = monotonicallyIncreasingTime() - start;
2005     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
2006 #endif
2007
2008     return image;
2009 }
2010
2011 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2012 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
2013 {
2014     if (!avAsset())
2015         return 0;
2016
2017     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
2018         return 0;
2019
2020     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
2021 }
2022 #endif
2023
2024 void AVFWrapper::updateVideoLayerGravity()
2025 {
2026     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
2027     // FIXME: <rdar://problem/14884340>
2028 }
2029
2030 #if ENABLE(ENCRYPTED_MEDIA_V2)
2031 void AVFWrapper::setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest)
2032 {
2033     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2034     if (requestsIterator != m_keyURIToRequestMap.end()) {
2035         requestsIterator->value.append(avRequest);
2036         return;
2037     }
2038
2039     Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>> requests;
2040     requests.append(avRequest);
2041     m_keyURIToRequestMap.set(keyURI, requests);
2042 }
2043
2044 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
2045 {
2046     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2047     if (requestsIterator == m_keyURIToRequestMap.end())
2048         return RetainPtr<AVCFAssetResourceLoadingRequestRef>();
2049
2050     auto request = requestsIterator->value.takeLast();
2051     if (requestsIterator->value.isEmpty())
2052         m_keyURIToRequestMap.take(keyURI);
2053
2054     return request;
2055 }
2056 #endif
2057
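// Layout callback for the wrapper layer created in platformLayer(): keep the CACF video layer positioned and
// sized to fill the wrapper layer so the video matches the element's size.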
2058 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
2059 {
2060     ASSERT(isMainThread());
2061     ASSERT(m_parent);
2062     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
2063
2064     CGRect bounds = wrapperLayer->bounds();
2065     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
2066     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 
2067
2068     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
2069     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
2070
2071     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2072 }
2073
2074 } // namespace WebCore
2075
2076 #else
2077 // AVFoundation should always be enabled for Apple production builds.
2078 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2079 #error AVFoundation is not enabled!
2080 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2081 #endif // USE(AVFOUNDATION)
2082 #endif // PLATFORM(WIN) && ENABLE(VIDEO)