5fdf1ebc5c726076f85af5e948b464fd92c91dfd
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / cf / MediaPlayerPrivateAVFoundationCF.cpp
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if PLATFORM(WIN) && ENABLE(VIDEO) 
29
30 #if USE(AVFOUNDATION)
31
32 #include "MediaPlayerPrivateAVFoundationCF.h"
33
34 #include "ApplicationCacheResource.h"
35 #include "CDMSessionAVFoundationCF.h"
36 #include "COMPtr.h"
37 #include "FloatConversion.h"
38 #include "FrameView.h"
39 #include "GraphicsContext.h"
40 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
41 #include "InbandTextTrackPrivateAVCF.h"
42 #else
43 #include "InbandTextTrackPrivateLegacyAVCF.h"
44 #endif
45 #include "MediaTimeAVFoundation.h"
46 #include "URL.h"
47 #include "Logging.h"
48 #include "PlatformCALayerWin.h"
49 #include "SoftLinking.h"
50 #include "TimeRanges.h"
51 #include "WebCoreAVCFResourceLoader.h"
52
53 #include <AVFoundationCF/AVCFPlayerItem.h>
54 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
55 #include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
56 #endif
57 #include <AVFoundationCF/AVCFPlayerLayer.h>
58 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) || HAVE(ENCRYPTED_MEDIA_V2)
59 #include <AVFoundationCF/AVCFAssetResourceLoader.h>
60 #endif
61 #include <AVFoundationCF/AVFoundationCF.h>
62 #include <CoreMedia/CoreMedia.h>
63 #include <d3d9.h>
64 #include <delayimp.h>
65 #include <dispatch/dispatch.h>
66 #if ENABLE(ENCRYPTED_MEDIA_V2)
67 #include <runtime/DataView.h>
68 #include <runtime/Uint16Array.h>
69 #endif
70 #include <wtf/HashMap.h>
71 #include <wtf/Threading.h>
72 #include <wtf/text/CString.h>
73 #include <wtf/text/StringView.h>
74 #include <wtf/StringPrintStream.h>
75
76 // The softlink header files must be included after the AVCF and CoreMedia header files.
77 #include "AVFoundationCFSoftLinking.h"
78 #include "CoreMediaSoftLinking.h"
79
80 // We don't bother softlinking against libdispatch since it's already been loaded by AAS.
81 #ifdef DEBUG_ALL
82 #pragma comment(lib, "libdispatch_debug.lib")
83 #else
84 #pragma comment(lib, "libdispatch.lib")
85 #endif
86
87 using namespace std;
88
89 namespace WebCore {
90
91 class LayerClient;
92
93 class AVFWrapper {
94 public:
95     AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
96     ~AVFWrapper();
97
98     void scheduleDisconnectAndDelete();
99
100     void createAVCFVideoLayer();
101     void destroyVideoLayer();
102     PlatformLayer* platformLayer();
103
104     CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
105     PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
106     void setVideoLayerNeedsCommit();
107     void setVideoLayerHidden(bool);
108
109     void createImageGenerator();
110     void destroyImageGenerator();
111     RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const IntRect&);
112
113     void createAssetForURL(const String& url, bool inheritURI);
114     void setAsset(AVCFURLAssetRef);
115     
116     void createPlayer(IDirect3DDevice9*);
117     void createPlayerItem();
118     
119     void checkPlayability();
120     void beginLoadingMetadata();
121     
122     void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
123     void updateVideoLayerGravity();
124
125     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
126     InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }
127
128 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
129     static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
130     static void processCue(void* context);
131 #endif
132 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
133     static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
134 #endif
135     static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
136     static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
137     static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
138     static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
139     static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
140     static void processNotification(void* context);
141
142     inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
143     inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
144     inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
145     inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
146     inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
147     inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
148 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
149     inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
150     AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
151 #endif
152     inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }
153
154 #if ENABLE(ENCRYPTED_MEDIA_V2)
155     RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
156 #endif
157
158 private:
159     inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }
160
161     static Mutex& mapLock();
162     static HashMap<uintptr_t, AVFWrapper*>& map();
163     static AVFWrapper* avfWrapperForCallbackContext(void*);
164     void addToMap();
165     void removeFromMap() const;
166 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
167     bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
168     static void processShouldWaitForLoadingOfResource(void* context);
169 #endif
170
171     static void disconnectAndDeleteAVFWrapper(void*);
172
173     static uintptr_t s_nextAVFWrapperObjectID;
174     uintptr_t m_objectID;
175
176     MediaPlayerPrivateAVFoundationCF* m_owner;
177
178     RetainPtr<AVCFPlayerRef> m_avPlayer;
179     RetainPtr<AVCFURLAssetRef> m_avAsset;
180     RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
181     RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
182     RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
183     RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
184 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
185     RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
186     RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
187 #endif
188
189     dispatch_queue_t m_notificationQueue;
190
191     mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
192     RefPtr<PlatformCALayer> m_videoLayerWrapper;
193
194     OwnPtr<LayerClient> m_layerClient;
195     COMPtr<IDirect3DDevice9Ex> m_d3dDevice;
196
197     InbandTextTrackPrivateAVF* m_currentTextTrack;
198
199 #if ENABLE(ENCRYPTED_MEDIA_V2)
200     HashMap<String, RetainPtr<AVCFAssetResourceLoadingRequestRef>> m_keyURIToRequestMap;
201 #endif
202 };
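// AVCF delivers its notifications and completion callbacks on a background dispatch queue and only
// hands back the void* context registered with it. Rather than passing "this" (which could dangle if
// a callback fires after teardown), AVFWrapper registers a small integer object ID in a global map
// guarded by mapLock(). A minimal sketch of how the static callbacks below recover their wrapper:
//
//     MutexLocker locker(mapLock());
//     AVFWrapper* self = avfWrapperForCallbackContext(context);
//     if (!self)
//         return; // Wrapper already disconnected; drop the notification.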
203
204 uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;
205
206 class LayerClient : public PlatformCALayerClient {
207 public:
208     LayerClient(AVFWrapper* parent) : m_parent(parent) { }
209     virtual ~LayerClient() { m_parent = 0; }
210
211 private:
212     virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
213     virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }
214
215     virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
216     virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
217     virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&) { }
218     virtual bool platformCALayerShowDebugBorders() const { return false; }
219     virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
220     virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }
221
222     virtual bool platformCALayerContentsOpaque() const { return false; }
223     virtual bool platformCALayerDrawsContent() const { return false; }
224     virtual float platformCALayerDeviceScaleFactor() const { return 1; }
225
226     AVFWrapper* m_parent;
227 };
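// LayerClient exists so the wrapper hears about layout changes in the CACF layer tree
// (platformCALayerRespondsToLayoutChanges() returns true and the work happens in
// platformCALayerLayoutSublayersOfLayer()), presumably to keep the AVCF player layer sized to its
// wrapper layer. The remaining PlatformCALayerClient hooks are deliberate no-ops because the player
// layer draws its own content.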
228
229 #if !LOG_DISABLED
230 static const char* boolString(bool val)
231 {
232     return val ? "true" : "false";
233 }
234 #endif
235
236 static CFArrayRef createMetadataKeyNames()
237 {
238     static const CFStringRef keyNames[] = {
239         AVCFAssetPropertyDuration,
240         AVCFAssetPropertyNaturalSize,
241         AVCFAssetPropertyPreferredTransform,
242         AVCFAssetPropertyPreferredRate,
243         AVCFAssetPropertyPlayable,
244         AVCFAssetPropertyTracks,
245 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
246         AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
247 #endif
248     };
249     
250     return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
251 }
252
253 static CFArrayRef metadataKeyNames()
254 {
255     static CFArrayRef keys = createMetadataKeyNames();
256     return keys;
257 }
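// Note that the CFArrayRef created by createMetadataKeyNames() is intentionally never released:
// metadataKeyNames() caches it in a function-local static on first use, and every caller shares that
// one process-lifetime array, so no retain/release bookkeeping is needed at the call sites.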
258
259 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
260 static CFStringRef CMTimeRangeStartKey()
261 {
262     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start")));
263     return key;
264 }
265
266 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
267 static CFStringRef CMTimeRangeDurationKey()
268 {
269     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration")));
270     return key;
271 }
272
273 // FIXME: It would be better if AVCF exported this notification name.
274 static CFStringRef CACFContextNeedsFlushNotification()
275 {
276     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification")));
277     return name;
278 }
279
280 // Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
281 // to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
282 inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
283 {
284     return wrapper ? wrapper->videoLayer() : 0; 
285 }
286
287 inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
288 {
289     return wrapper ? wrapper->avPlayer() : 0; 
290 }
291
292 inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
293 {
294     return wrapper ? wrapper->avAsset() : 0; 
295 }
296
297 inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
298 {
299     return wrapper ? wrapper->avPlayerItem() : 0; 
300 }
301
302 inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
303 {
304     return wrapper ? wrapper->imageGenerator() : 0; 
305 }
306
307 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
308 inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
309 {
310     return wrapper ? wrapper->legibleOutput() : 0;
311 }
312
313 inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
314 {
315     return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
316 }
317 #endif
318
319 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
320 static dispatch_queue_t globalQueue = nullptr;
321
322 static void initGlobalLoaderDelegateQueue(void* ctx)
323 {
324     globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
325 }
326
327 static dispatch_queue_t globalLoaderDelegateQueue()
328 {
329     static dispatch_once_t onceToken;
330
331     dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);
332
333     return globalQueue;
334 }
335 #endif
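// Every AVFWrapper instance shares the single serial queue above for resource-loader delegate work;
// dispatch_once_f() guarantees it is created exactly once even if several players reach
// globalLoaderDelegateQueue() concurrently.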
336
337 PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationCF::create(MediaPlayer* player) 
338 {
339     return adoptPtr(new MediaPlayerPrivateAVFoundationCF(player));
340 }
341
342 void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
343 {
344     if (isAvailable())
345         registrar(create, getSupportedTypes, supportsType, 0, 0, 0, 0);
346 }
347
348 MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
349     : MediaPlayerPrivateAVFoundation(player)
350     , m_avfWrapper(0)
351     , m_videoFrameHasDrawn(false)
352 {
353     LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
354 }
355
356 MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
357 {
358     LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
359 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
360     for (auto& pair : m_resourceLoaderMap)
361         pair.value->invalidate();
362 #endif
363     cancelLoad();
364 }
365
366 void MediaPlayerPrivateAVFoundationCF::cancelLoad()
367 {
368     LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);
369
370     // Our cancellation of pending loads will invoke their completion handlers; ignore the resulting callbacks.
371     setDelayCallbacks(true);
372     setIgnoreLoadStateChanges(true);
373
374     tearDownVideoRendering();
375
376     clearTextTracks();
377
378     if (m_avfWrapper) {
379         // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to 
380         // disconnectAndDeleteAVFWrapper on that queue. 
381         m_avfWrapper->scheduleDisconnectAndDelete();
382         m_avfWrapper = 0;
383     }
384
385     setIgnoreLoadStateChanges(false);
386     setDelayCallbacks(false);
387 }
388
389 void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
390 {
391     ASSERT(supportsAcceleratedRendering());
392
393     if (m_avfWrapper)
394         m_avfWrapper->updateVideoLayerGravity();
395 }
396
397 bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
398 {
399     return videoLayer(m_avfWrapper);
400 }
401
402 bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
403 {
404     return imageGenerator(m_avfWrapper);
405 }
406
407 void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
408 {
409     LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);
410
411     if (imageGenerator(m_avfWrapper))
412         return;
413
414     if (m_avfWrapper)
415         m_avfWrapper->createImageGenerator();
416 }
417
418 void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
419 {
420     if (m_avfWrapper)
421         m_avfWrapper->destroyImageGenerator();
422 }
423
424 void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
425 {
426     ASSERT(supportsAcceleratedRendering());
427
428     if (m_avfWrapper)
429         m_avfWrapper->createAVCFVideoLayer();
430 }
431
432 void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
433 {
434     LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
435     if (m_avfWrapper)
436         m_avfWrapper->destroyVideoLayer();
437 }
438
439 bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
440 {
441     return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
442 }
443
444 void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
445 {
446     if (m_avfWrapper)
447         m_avfWrapper->setCurrentTextTrack(track);
448 }
449
450 InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
451 {
452     if (m_avfWrapper)
453         return m_avfWrapper->currentTextTrack();
454
455     return 0;
456 }
457
458 void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
459 {
460     ASSERT(!m_avfWrapper);
461
462     setDelayCallbacks(true);
463
464     bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");
465
466     m_avfWrapper = new AVFWrapper(this);
467     m_avfWrapper->createAssetForURL(url, inheritURI);
468     setDelayCallbacks(false);
469 }
470
471 void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
472 {
473     ASSERT(m_avfWrapper);
474     
475     setDelayCallbacks(true);
476     m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
477     setDelayCallbacks(false);
478 }
479
480 void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
481 {
482     ASSERT(m_avfWrapper);
483     
484     setDelayCallbacks(true);
485     m_avfWrapper->createPlayerItem();
486
487     setDelayCallbacks(false);
488 }
489
490 void MediaPlayerPrivateAVFoundationCF::checkPlayability()
491 {
492     ASSERT(m_avfWrapper);
493     m_avfWrapper->checkPlayability();
494 }
495
496 void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
497 {
498     ASSERT(m_avfWrapper);
499     m_avfWrapper->beginLoadingMetadata();
500 }
501
502 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
503 {
504     if (!avPlayerItem(m_avfWrapper))
505         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
506
507     AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
508     if (status == AVCFPlayerItemStatusUnknown)
509         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
510     if (status == AVCFPlayerItemStatusFailed)
511         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
512     if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
513         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
514     if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
515         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
516     if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
517         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
518     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
519 }
520
521 PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
522 {
523     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
524     PlatformMedia pm;
525     pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
526     pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
527     return pm;
528 }
529
530 PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
531 {
532     if (!m_avfWrapper)
533         return 0;
534
535     return m_avfWrapper->platformLayer();
536 }
537
538 void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
539 {
540     if (!m_avfWrapper)
541         return;
542     
543     // FIXME: The Mac port uses a CATransaction here; we need to figure out why that was done there and
544     // whether we're affected by the same issue.
545     setDelayCallbacks(true);
546     m_avfWrapper->setVideoLayerHidden(!isVisible);    
547     if (!isVisible)
548         tearDownVideoRendering();
549     setDelayCallbacks(false);
550 }
551
552 void MediaPlayerPrivateAVFoundationCF::platformPlay()
553 {
554     LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
555     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
556         return;
557
558     setDelayCallbacks(true);
559     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
560     setDelayCallbacks(false);
561 }
562
563 void MediaPlayerPrivateAVFoundationCF::platformPause()
564 {
565     LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
566     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
567         return;
568
569     setDelayCallbacks(true);
570     AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
571     setDelayCallbacks(false);
572 }
573
574 MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
575 {
576     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
577         return MediaTime::zeroTime();
578
579     CMTime cmDuration;
580
581     // Check the player item if we have one and it has loaded its duration; some assets never report a duration.
582     if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
583         cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
584     else
585         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
586
587     if (CMTIME_IS_NUMERIC(cmDuration))
588         return toMediaTime(cmDuration);
589
590     if (CMTIME_IS_INDEFINITE(cmDuration))
591         return MediaTime::positiveInfiniteTime();
592
593     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
594     return MediaTime::invalidTime();
595 }
596
597 MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
598 {
599     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
600         return MediaTime::zeroTime();
601
602     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
603     if (CMTIME_IS_NUMERIC(itemTime))
604         return max(toMediaTime(itemTime), MediaTime::zeroTime());
605
606     return MediaTime::zeroTime();
607 }
608
609 void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
610 {
611     if (!m_avfWrapper)
612         return;
613     
614     // seekToTime generates several event callbacks; update afterwards.
615     setDelayCallbacks(true);
616     m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
617     setDelayCallbacks(false);
618 }
619
620 void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
621 {
622     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
623         return;
624
625     AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
626 }
627
628 void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
629 {
630     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
631         return;
632
633     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
634     AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
635 }
636
637 void MediaPlayerPrivateAVFoundationCF::updateRate()
638 {
639     LOG(Media, "MediaPlayerPrivateAVFoundationCF::updateRate(%p)", this);
640     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
641         return;
642
643     setDelayCallbacks(true);
644     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
645     setDelayCallbacks(false);
646 }
647
648 float MediaPlayerPrivateAVFoundationCF::rate() const
649 {
650     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
651         return 0;
652
653     setDelayCallbacks(true);
654     float currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
655     setDelayCallbacks(false);
656
657     return currentRate;
658 }
659
660 static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
661 {
662     // Is the range valid?
663     if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
664         return false;
665
666     if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
667         return false;
668
669     return true;
670 }
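// The range-walking functions below (platformBufferedTimeRanges, platformMinTimeSeekable,
// platformMaxTimeSeekable and platformMaxTimeLoaded) all follow the same pattern: copy the item's
// range array, unpack each CFDictionary back into CMTimes with CMTimeMakeFromDictionary() and the
// "start"/"duration" keys, and skip anything that fails the validity check above.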
671
672 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
673 {
674     auto timeRanges = PlatformTimeRanges::create();
675
676     if (!avPlayerItem(m_avfWrapper))
677         return timeRanges;
678
679     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
680     if (!loadedRanges)
681         return timeRanges;
682
683     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
684     for (CFIndex i = 0; i < rangeCount; i++) {
685         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
686         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
687         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
688         
689         if (timeRangeIsValidAndNotEmpty(start, duration)) {
690             MediaTime rangeStart = toMediaTime(start);
691             MediaTime rangeEnd = rangeStart + toMediaTime(duration);
692             timeRanges->add(rangeStart, rangeEnd);
693         }
694     }
695
696     return timeRanges;
697 }
698
699 MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
700 {
701     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
702     if (!seekableRanges) 
703         return MediaTime::zeroTime(); 
704
705     MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
706     bool hasValidRange = false; 
707     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
708     for (CFIndex i = 0; i < rangeCount; i++) {
709         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
710         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
711         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
712         if (!timeRangeIsValidAndNotEmpty(start, duration))
713             continue;
714
715         hasValidRange = true; 
716         MediaTime startOfRange = toMediaTime(start); 
717         if (minTimeSeekable > startOfRange) 
718             minTimeSeekable = startOfRange; 
719     } 
720     return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
721 }
722
723 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
724 {
725     if (!avPlayerItem(m_avfWrapper))
726         return MediaTime::zeroTime();
727
728     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
729     if (!seekableRanges)
730         return MediaTime::zeroTime();
731
732     MediaTime maxTimeSeekable;
733     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
734     for (CFIndex i = 0; i < rangeCount; i++) {
735         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
736         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
737         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
738         if (!timeRangeIsValidAndNotEmpty(start, duration))
739             continue;
740         
741         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
742         if (maxTimeSeekable < endOfRange)
743             maxTimeSeekable = endOfRange;
744     }
745
746     return maxTimeSeekable;   
747 }
748
749 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
750 {
751     if (!avPlayerItem(m_avfWrapper))
752         return MediaTime::zeroTime();
753
754     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
755     if (!loadedRanges)
756         return MediaTime::zeroTime();
757
758     MediaTime maxTimeLoaded;
759     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
760     for (CFIndex i = 0; i < rangeCount; i++) {
761         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
762         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
763         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
764         if (!timeRangeIsValidAndNotEmpty(start, duration))
765             continue;
766         
767         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
768         if (maxTimeLoaded < endOfRange)
769             maxTimeLoaded = endOfRange;
770     }
771
772     return maxTimeLoaded;   
773 }
774
775 unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
776 {
777     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
778         return 0;
779
780     int64_t totalMediaSize = 0;
781     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
782     CFIndex trackCount = CFArrayGetCount(tracks.get());
783     for (CFIndex i = 0; i < trackCount; i++) {
784         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
785         totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
786     }
787
788     return static_cast<unsigned long long>(totalMediaSize);
789 }
790
791 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
792 {
793     if (!avAsset(m_avfWrapper))
794         return MediaPlayerAVAssetStatusDoesNotExist;
795
796     // First, make sure all metadata properties we rely on are loaded.
797     CFArrayRef keys = metadataKeyNames();
798     CFIndex keyCount = CFArrayGetCount(keys);
799     for (CFIndex i = 0; i < keyCount; i++) {
800         CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
801         AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);
802
803         if (keyStatus < AVCFPropertyValueStatusLoaded)
804             return MediaPlayerAVAssetStatusLoading;
805         if (keyStatus == AVCFPropertyValueStatusFailed) {
806             if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
807                 // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
808                 // <rdar://problem/15966685>
809                 continue;
810             }
811 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
812             if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
813                 // On Windows, the media selection options are not available when initially interacting with a streaming source.
814                 // <rdar://problem/16160699>
815                 continue;
816             }
817 #endif
818             return MediaPlayerAVAssetStatusFailed;
819         }
820         if (keyStatus == AVCFPropertyValueStatusCancelled)
821             return MediaPlayerAVAssetStatusCancelled;
822     }
823
824     if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
825         return MediaPlayerAVAssetStatusPlayable;
826
827     return MediaPlayerAVAssetStatusLoaded;
828 }
829
830 void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
831 {
832     if (!metaDataAvailable() || context->paintingDisabled())
833         return;
834
835     if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
836         // We're being told to render into a context, but we already have the
837         // video layer, which probably means we've been called from <canvas>.
838         createContextVideoRenderer();
839     }
840
841     paint(context, rect);
842 }
843
844 void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const IntRect& rect)
845 {
846     if (!metaDataAvailable() || context->paintingDisabled() || !imageGenerator(m_avfWrapper))
847         return;
848
849     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
850
851     setDelayCallbacks(true);
852     RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
853     if (image) {
854         context->save();
855         context->translate(rect.x(), rect.y() + rect.height());
856         context->scale(FloatSize(1.0f, -1.0f));
857         context->setImageInterpolationQuality(InterpolationLow);
858         IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
859         CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
860         context->restore();
861         image = 0;
862     }
863     setDelayCallbacks(false);
864     
865     m_videoFrameHasDrawn = true;
866 }
867
868 static HashSet<String> mimeTypeCache()
869 {
870     DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
871     static bool typeListInitialized = false;
872
873     if (typeListInitialized)
874         return cache;
875     typeListInitialized = true;
876
877     RetainPtr<CFArrayRef> supportedTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
878     
879     ASSERT(supportedTypes);
880     if (!supportedTypes)
881         return cache;
882
883     CFIndex typeCount = CFArrayGetCount(supportedTypes.get());
884     for (CFIndex i = 0; i < typeCount; i++)
885         cache.add(static_cast<CFStringRef>(CFArrayGetValueAtIndex(supportedTypes.get(), i)));
886
887     return cache;
888 }
889
890 void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String>& supportedTypes)
891 {
892     supportedTypes = mimeTypeCache();
893 }
894
895 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
896 static bool keySystemIsSupported(const String& keySystem)
897 {
898     if (equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0"))
899         return true;
900     return false;
901 }
902 #endif
903
904 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
905 {
906     // Only return "IsSupported" if there is a codecs parameter for now as there is no way to ask if it supports an
907     // extended MIME type until rdar://8721715 is fixed.
908     if (mimeTypeCache().contains(parameters.type))
909         return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
910
911     return MediaPlayer::IsNotSupported;
912 }
913
914
915 bool MediaPlayerPrivateAVFoundationCF::isAvailable()
916 {
917     return AVFoundationCFLibrary() && CoreMediaLibrary();
918 }
919
920 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
921 void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
922 {
923     WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);
924
925     if (resourceLoader)
926         resourceLoader->stopLoading();
927 }
928
929 void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
930 {
931     m_resourceLoaderMap.remove(avRequest);
932 }
933 #endif
934
935 MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
936 {
937     if (!metaDataAvailable())
938         return timeValue;
939
940     // FIXME - can not implement until rdar://8721669 is fixed.
941     return timeValue;
942 }
943
944 void MediaPlayerPrivateAVFoundationCF::tracksChanged()
945 {
946     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
947     m_languageOfPrimaryAudioTrack = String();
948
949     if (!avAsset(m_avfWrapper))
950         return;
951
952     setDelayCharacteristicsChangedNotification(true);
953
954     bool haveCCTrack = false;
955     bool hasCaptions = false;
956
957     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
958     // asked about those fairly frequently.
959     if (!avPlayerItem(m_avfWrapper)) {
960         // We don't have a player item yet, so check with the asset because some assets support inspection
961         // prior to becoming ready to play.
962         RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
963         setHasVideo(CFArrayGetCount(visualTracks.get()));
964
965         RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
966         setHasAudio(CFArrayGetCount(audioTracks.get()));
967
968 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
969         RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
970         hasCaptions = CFArrayGetCount(captionTracks.get());
971 #endif
972     } else {
973         bool hasVideo = false;
974         bool hasAudio = false;
975
976         RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
977
978         CFIndex trackCount = CFArrayGetCount(tracks.get());
979         for (CFIndex i = 0; i < trackCount; i++) {
980             AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
981             
982             if (AVCFPlayerItemTrackIsEnabled(track)) {
983                 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
984                 if (!assetTrack) {
985                     // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
986                     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %p is enabled, but has no asset track.", this, track);
987                     continue;
988                 }
989                 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
990                 if (!mediaType)
991                     continue;
992                 
993                 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
994                     hasVideo = true;
995                 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
996                     hasAudio = true;
997                 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
998 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
999                     hasCaptions = true;
1000 #endif
1001                     haveCCTrack = true;
1002                 }
1003             }
1004         }
1005
1006         setHasVideo(hasVideo);
1007         setHasAudio(hasAudio);
1008     }
1009
1010 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1011     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1012     if (legibleGroup) {
1013         RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1014         hasCaptions = CFArrayGetCount(playableOptions.get());
1015         if (hasCaptions)
1016             processMediaSelectionOptions();
1017     }
1018 #endif
1019
1020 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1021     if (haveCCTrack)
1022         processLegacyClosedCaptionsTracks();
1023 #endif
1024
1025     setHasClosedCaptions(hasCaptions);
1026
1027     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 
1028         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1029
1030     sizeChanged();
1031
1032     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1033         characteristicsChanged();
1034
1035     setDelayCharacteristicsChangedNotification(false);
1036 }
1037
1038 void MediaPlayerPrivateAVFoundationCF::sizeChanged()
1039 {
1040     if (!avAsset(m_avfWrapper))
1041         return;
1042     
1043     // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
1044     // the union of all visual track rects.
1045     CGRect trackRectUnion = CGRectZero;
1046     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1047     CFIndex trackCount = CFArrayGetCount(tracks.get());
1048     for (CFIndex i = 0; i < trackCount; i++) {
1049         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1050         
1051         CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack);
1052         CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
1053         trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack)));
1054     }
1055     // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
1056     trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y);
1057     CGSize naturalSize = trackRectUnion.size;
1058
1059     if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper))
1060         naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper));
1061
1062     // Also look at the asset's preferred transform so we account for a movie matrix.
1063     CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper)));
1064     if (movieSize.width > naturalSize.width)
1065         naturalSize.width = movieSize.width;
1066     if (movieSize.height > naturalSize.height)
1067         naturalSize.height = movieSize.height;
1068     setNaturalSize(IntSize(naturalSize));
1069 }
1070
1071 bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const
1072 {
1073     // The AVFoundationCF player needs to have the root compositor available at construction time
1074     // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode.
1075     //
1076     // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode
1077     // when asked, then we could follow AVFoundation's model and switch to compositing
1078     // mode when beginning to play media.
1079     return true;
1080 }
1081
1082 #if ENABLE(ENCRYPTED_MEDIA_V2)
1083 RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI)
1084 {
1085     if (!m_avfWrapper)
1086         return nullptr;
1087
1088     return m_avfWrapper->takeRequestForKeyURI(keyURI);
1089 }
1090
1091 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem)
1092 {
1093     if (!keySystemIsSupported(keySystem))
1094         return nullptr;
1095
1096     return std::make_unique<CDMSessionAVFoundationCF>(this);
1097 }
1098 #endif
1099
1100 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1101 void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks()
1102 {
1103 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1104     AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper));
1105 #endif
1106
1107     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1108     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1109     CFIndex trackCount = CFArrayGetCount(tracks.get());
1110     for (CFIndex i = 0; i < trackCount; ++i) {
1111         AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1112
1113         RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack));
1114         if (!assetTrack) {
1115             // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1116             LOG(Media, "MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks(%p) - track %p has no asset track.", this, playerItemTrack);
1117             continue;
1118         }
1119         CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1120         if (!mediaType)
1121             continue;
1122                 
1123         if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo)
1124             continue;
1125
1126         bool newCCTrack = true;
1127         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1128             if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
1129                 continue;
1130
1131             RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get());
1132             if (track->avPlayerItemTrack() == playerItemTrack) {
1133                 removedTextTracks.remove(i - 1);
1134                 newCCTrack = false;
1135                 break;
1136             }
1137         }
1138
1139         if (!newCCTrack)
1140             continue;
1141         
1142         m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack));
1143     }
1144
1145     processNewAndRemovedTextTracks(removedTextTracks);
1146 }
1147 #endif
1148
1149 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1150 void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions()
1151 {
1152     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1153     if (!legibleGroup) {
1154         LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
1155         return;
1156     }
1157
1158     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
1159     // but set the selected legible track to nil so text tracks will not be automatically configured.
1160     if (!m_textTracks.size()) {
1161         ASSERT(AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup));
1162         AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper);
1163
1164         if (playerItem)
1165             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup);
1166     }
1167
1168     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1169     RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1170     CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get());
1171     for (CFIndex i = 0; i < legibleOptionsCount; ++i) {
1172         AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i));
1173         bool newTrack = true;
1174         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1175             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1176                 continue;
1177
1178             RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get());
1179             if (CFEqual(track->mediaSelectionOption(), option)) {
1180                 removedTextTracks.remove(i - 1);
1181                 newTrack = false;
1182                 break;
1183             }
1184         }
1185         if (!newTrack)
1186             continue;
1187
1188         m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic));
1189     }
1190
1191     processNewAndRemovedTextTracks(removedTextTracks);
1192 }
1193
1194 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1195
1196 void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
1197 {
1198     if (m_currentTextTrack == track)
1199         return;
1200
1201     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
1202         
1203     m_currentTextTrack = track;
1204
1205     if (track) {
1206         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1207             AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE);
1208 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1209         else
1210             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia());
1211 #endif
1212     } else {
1213 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1214         AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia());
1215 #endif
1216         AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE);
1217     }
1218 }
1219
1220 String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const
1221 {
1222     if (!m_languageOfPrimaryAudioTrack.isNull())
1223         return m_languageOfPrimaryAudioTrack;
1224
1225     if (!avPlayerItem(m_avfWrapper))
1226         return emptyString();
1227
1228 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1229     // If AVFoundation has an audible group, return the language of the currently selected audible option.
1230     AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible);
1231     AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup);
1232     if (currentlySelectedAudibleOption) {
1233         RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption));
1234         m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get());
1235         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1236
1237         return m_languageOfPrimaryAudioTrack;
1238     }
1239 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1240
1241     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
1242     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
1243     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio));
1244     CFIndex trackCount = tracks ? CFArrayGetCount(tracks.get()) : 0;
1245     if (!tracks || trackCount != 1) {
1246         m_languageOfPrimaryAudioTrack = emptyString();
1247         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? trackCount : 0));
1248         return m_languageOfPrimaryAudioTrack;
1249     }
1250
1251     AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0);
1252     RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track));
1253
1254     // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full
1255     // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the
1256     // ISO 639-2/T language code so check it.
1257     if (!language)
1258         language = adoptCF(AVCFAssetTrackCopyLanguageCode(track));
1259
1260     // Some legacy tracks have "und" as a language, treat that the same as no language at all.
1261     if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) {
1262         m_languageOfPrimaryAudioTrack = language.get();
1263         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1264         return m_languageOfPrimaryAudioTrack;
1265     }
1266
1267     LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
1268     m_languageOfPrimaryAudioTrack = emptyString();
1269     return m_languageOfPrimaryAudioTrack;
1270 }
1271
1272 void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay()
1273 {
1274     if (m_avfWrapper)
1275         m_avfWrapper->setVideoLayerNeedsCommit();
1276 }
1277
1278 AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner)
1279     : m_owner(owner)
1280     , m_objectID(s_nextAVFWrapperObjectID++)
1281     , m_currentTextTrack(0)
1282 {
1283     ASSERT(isMainThread());
1284     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1285     LOG(Media, "AVFWrapper::AVFWrapper(%p)", this);
1286
1287     m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0);
1288     addToMap();
1289 }
1290
1291 AVFWrapper::~AVFWrapper()
1292 {
1293     ASSERT(isMainThread());
1294     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1295     LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);
1296
1297     destroyVideoLayer();
1298     destroyImageGenerator();
1299
1300     if (m_notificationQueue)
1301         dispatch_release(m_notificationQueue);
1302
1303     if (avAsset()) {
1304         AVCFAssetCancelLoading(avAsset());
1305         m_avAsset = 0;
1306     }
1307
1308 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1309     if (legibleOutput()) {
1310         if (avPlayerItem())
1311             AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
1312         m_legibleOutput = 0;
1313     }
1314 #endif
1315
1316     m_avPlayerItem = 0;
1317     m_timeObserver = 0;
1318     m_avPlayer = 0;
1319 }
1320
1321 Mutex& AVFWrapper::mapLock()
1322 {
1323     static Mutex mapLock;
1324     return mapLock;
1325 }
1326
1327 HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
1328 {
1329     static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>;
1330     return map;
1331 }
1332
1333 void AVFWrapper::addToMap()
1334 {
1335     MutexLocker locker(mapLock());
1336     
1337     // HashMap doesn't like a key of 0, and we also need to make sure we aren't
1338     // using an object ID that's already in use.
1339     while (!m_objectID || (map().find(m_objectID) != map().end()))
1340         m_objectID = s_nextAVFWrapperObjectID++;
1341        
1342     LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);
1343
1344     map().add(m_objectID, this);
1345 }
1346
1347 void AVFWrapper::removeFromMap() const
1348 {
1349     LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);
1350
1351     MutexLocker locker(mapLock());
1352     map().remove(m_objectID);
1353 }
1354
1355 AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
1356 {
1357     // Assumes caller has locked mapLock().
1358     HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
1359     if (it == map().end())
1360         return 0;
1361
1362     return it->value;
1363 }
1364
1365 void AVFWrapper::scheduleDisconnectAndDelete()
1366 {
1367     // Ignore any subsequent notifications we might receive in notificationCallback().
1368     removeFromMap();
1369
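         // Finish tearing down the AVCF objects on the wrapper's dispatch queue; disconnectAndDeleteAVFWrapper()
         // then hands final destruction back to the main thread (see destroyAVFWrapper()).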
1370     dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
1371 }
1372
1373 static void destroyAVFWrapper(void* context)
1374 {
1375     ASSERT(isMainThread());
1376     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1377     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1378     if (!avfWrapper)
1379         return;
1380
1381     delete avfWrapper;
1382 }
1383
1384 void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
1385 {
1386     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1387
1388     LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);
1389
1390     if (avfWrapper->avPlayerItem()) {
1391         CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1392         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
1393         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
1394         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
1395         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1396         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1397         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
1398         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
1399         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
1400         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
1401         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
1402         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
1403     }
1404
1405     if (avfWrapper->avPlayer()) {
1406         if (avfWrapper->timeObserver())
1407             AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());
1408
1409         CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
1410     }
1411
1412 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
         // The player item (and therefore the legible output) may never have been created; guard against null.
1413     if (avfWrapper->avPlayerItem() && avfWrapper->legibleOutput())
             AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
1414 #endif
1415
1416     // We must release the AVCFPlayer and other items on the same thread that created them.
1417     dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
1418 }
1419
1420 void AVFWrapper::createAssetForURL(const String& url, bool inheritURI)
1421 {
1422     ASSERT(!avAsset());
1423
1424     RetainPtr<CFURLRef> urlRef = URL(ParsedURLString, url).createCFURL();
1425
1426     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1427
1428     if (inheritURI)
1429         CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);
1430
1431     m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue));
1432
1433 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
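         // Hook up the asset's resource loader so WebCore can service custom loading requests itself
         // (and, with ENCRYPTED_MEDIA_V2, key requests); see shouldWaitForLoadingOfResource().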
1434     AVCFAssetResourceLoaderCallbacks loaderCallbacks;
1435     loaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1;
1436     ASSERT(callbackContext());
1437     loaderCallbacks.context = callbackContext();
1438     loaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource;
1439
1440     AVCFAssetResourceLoaderRef resourceLoader = AVCFURLAssetGetResourceLoader(m_avAsset.get());
1441     AVCFAssetResourceLoaderSetCallbacks(resourceLoader, &loaderCallbacks, globalLoaderDelegateQueue());
1442 #endif
1443 }
1444
1445 void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
1446 {
1447     ASSERT(isMainThread());
1448     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1449     ASSERT(avPlayerItem());
1450
1451     if (avPlayer())
1452         return;
1453
1454     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1455
1456     if (d3dDevice) {
1457         // QI for an IDirect3DDevice9Ex interface; it is required for hardware video decoding.
1458         COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
1459         m_d3dDevice = d3dEx;
1460     } else
1461         m_d3dDevice = 0;
1462
1463     if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
1464         CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);
1465
1466 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1467     CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
1468 #endif
1469
1470     // FIXME: We need a way to create an AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
1471     AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
1472     m_avPlayer = adoptCF(playerRef);
1473 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
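         // Cues are rendered by WebCore via the legible output (see createPlayerItem()), so disable the
         // player's built-in closed-caption display.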
1474     AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
1475 #endif
1476
1477     if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
1478         AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());
1479
1480     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1481     ASSERT(center);
1482
1483     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1484
1485     // Add a time observer and ask to be called infrequently, because we don't really want periodic callbacks;
1486     // the observer will also be called whenever a seek happens.
1487     const double veryLongInterval = 60*60*60*24*30;
1488     m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
1489 }
1490
1491 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1492 static RetainPtr<CFArrayRef> createLegibleOutputSubtypes()
1493 {
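         // kCMSubtitleFormatType_WebVTT may not be declared by the CoreMedia headers used here, so the
         // four-character code is spelled out literally.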
1494     int webVTTInt = 'wvtt'; // kCMSubtitleFormatType_WebVTT;
1495     RetainPtr<CFNumberRef> webVTTNumber = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &webVTTInt));
1496     CFTypeRef formatTypes[] = { webVTTNumber.get() };
1497     return adoptCF(CFArrayCreate(0, formatTypes, WTF_ARRAY_LENGTH(formatTypes), &kCFTypeArrayCallBacks));
1498 }
1499 #endif
1500
1501 void AVFWrapper::createPlayerItem()
1502 {
1503     ASSERT(isMainThread());
1504     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1505     ASSERT(avAsset());
1506
1507     if (avPlayerItem())
1508         return;
1509
1510     // Create the player item so we begin loading media data.
1511     AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
1512     m_avPlayerItem = adoptCF(itemRef);
1513
1514     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1515     ASSERT(center);
1516
1517     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1518     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1519     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1520     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1521     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1522     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1523     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1524     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1525     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1526     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1527     // FIXME: Are there other legible-output notifications we need to register for? asset and hasEnabledAudio are not exposed by AVCF.
1528
1529     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);
1530
1531 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1532     const CFTimeInterval legibleOutputAdvanceInterval = 2;
1533
1534     m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, createLegibleOutputSubtypes().get()));
1535     AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);
1536
1537     AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
1538     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
1539     ASSERT(callbackContext());
1540     callbackInfo.context = callbackContext();
1541     callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;
1542
1543     AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
1544     AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
1545     AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
1546     AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
1547 #endif
1548 }
1549
1550 void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
1551 {
1552     MutexLocker locker(mapLock());
1553     AVFWrapper* self = avfWrapperForCallbackContext(context);
1554     if (!self) {
1555         LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1556         return;
1557     }
1558
1559     double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero; negative values are sometimes reported.
1560     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
1561 }
1562
1563 struct NotificationCallbackData {
1564     RetainPtr<CFStringRef> m_propertyName;
1565     void* m_context;
1566
1567     NotificationCallbackData(CFStringRef propertyName, void* context)
1568         : m_propertyName(propertyName), m_context(context)
1569     {
1570     }
1571 };
1572
1573 void AVFWrapper::processNotification(void* context)
1574 {
1575     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1576     ASSERT(context);
1577
1578     if (!context)
1579         return;
1580
1581     OwnPtr<NotificationCallbackData> notificationData = adoptPtr(reinterpret_cast<NotificationCallbackData*>(context));
1582
1583     MutexLocker locker(mapLock());
1584     AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
1585     if (!self) {
1586         LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1587         return;
1588     }
1589
1590     CFStringRef propertyName = notificationData->m_propertyName.get();
1591
1592     if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
1593         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1594     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1595         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1596     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
1597         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1598         if (asset)
1599             self->setAsset(asset);
1600         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1601     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1602         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1603     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1604         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1605     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1606         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1607     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1608         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1609     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1610         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1611     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1612         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1613     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1614         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1615     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1616         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1617     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1618         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1619     else
1620         ASSERT_NOT_REACHED();
1621 }
1622
1623 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1624 {
1625 #if !LOG_DISABLED
1626     char notificationName[256];
1627     CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
1628     LOG(Media, "AVFWrapper::notificationCallback(if=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1629 #endif
1630
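         // This callback may be delivered off the main thread, so capture the notification data and bounce it
         // to the main thread for processing.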
1631     OwnPtr<NotificationCallbackData> notificationData = adoptPtr(new NotificationCallbackData(propertyName, observer));
1632
1633     dispatch_async_f(dispatch_get_main_queue(), notificationData.leakPtr(), processNotification);
1634 }
1635
1636 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1637 {
1638     MutexLocker locker(mapLock());
1639     AVFWrapper* self = avfWrapperForCallbackContext(context);
1640     if (!self) {
1641         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1642         return;
1643     }
1644
1645     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1646     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1647 }
1648
1649 void AVFWrapper::checkPlayability()
1650 {
1651     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1652
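         // Created on first use and intentionally never released; the same property-key array is reused for every call.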
1653     static CFArrayRef propertyKeyName;
1654     if (!propertyKeyName) {
1655         static const CFStringRef keyNames[] = { 
1656             AVCFAssetPropertyPlayable
1657         };
1658         propertyKeyName = CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
1659     }
1660
1661     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1662 }
1663
1664 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1665 {
1666     MutexLocker locker(mapLock());
1667     AVFWrapper* self = avfWrapperForCallbackContext(context);
1668     if (!self) {
1669         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1670         return;
1671     }
1672
1673     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1674     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1675 }
1676
1677 void AVFWrapper::beginLoadingMetadata()
1678 {
1679     ASSERT(avAsset());
1680     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1681     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1682 }
1683
1684 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1685 {
1686     MutexLocker locker(mapLock());
1687     AVFWrapper* self = avfWrapperForCallbackContext(context);
1688     if (!self) {
1689         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1690         return;
1691     }
1692
1693     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1694     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1695 }
1696
1697 void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
1698 {
1699     ASSERT(avPlayerItem());
1700     CMTime cmTime = toCMTime(time);
1701     CMTime cmBefore = toCMTime(negativeTolerance);
1702     CMTime cmAfter = toCMTime(positiveTolerance);
1703     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1704 }
1705
1706 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1707 struct LegibleOutputData {
1708     RetainPtr<CFArrayRef> m_attributedStrings;
1709     RetainPtr<CFArrayRef> m_samples;
1710     MediaTime m_time;
1711     void* m_context;
1712
1713     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
1714         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1715     {
1716     }
1717 };
1718
1719 void AVFWrapper::processCue(void* context)
1720 {
1721     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1722     ASSERT(context);
1723
1724     if (!context)
1725         return;
1726
1727     std::unique_ptr<LegibleOutputData> legibleOutputData(reinterpret_cast<LegibleOutputData*>(context));
1728
1729     MutexLocker locker(mapLock());
1730     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1731     if (!self) {
1732         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1733         return;
1734     }
1735
1736     if (!self->m_currentTextTrack)
1737         return;
1738
1739     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1740 }
1741
1742 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1743 {
1744     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1745     MutexLocker locker(mapLock());
1746     AVFWrapper* self = avfWrapperForCallbackContext(context);
1747     if (!self) {
1748         LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1749         return;
1750     }
1751
1752     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1753
1754     ASSERT(legibleOutput == self->m_legibleOutput);
1755
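         // The cue data must be processed on the main thread, where m_currentTextTrack is used; copy it and dispatch.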
1756     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, toMediaTime(itemTime), context);
1757
1758     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1759 }
1760 #endif
1761
1762 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1763 struct LoadRequestData {
1764     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1765     void* m_context;
1766
1767     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1768         : m_request(request), m_context(context)
1769     {
1770     }
1771 };
1772
1773 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1774 {
1775     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1776     ASSERT(context);
1777
1778     if (!context)
1779         return;
1780
1781     std::unique_ptr<LoadRequestData> loadRequestData(reinterpret_cast<LoadRequestData*>(context));
1782
1783     MutexLocker locker(mapLock());
1784     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1785     if (!self) {
1786         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1787         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), nullptr);
1788         return;
1789     }
1790
1791     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get()))
1792         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), nullptr);
1793 }
1794
1795 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1796 {
1797 #if ENABLE(ENCRYPTED_MEDIA_V2)
1798     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1799     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1800     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1801     String scheme = schemeRef.get();
1802
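         // "skd" URLs are key requests. Package the key URI as initData, ask the key machinery (via keyNeeded())
         // whether it will handle it, and if so remember the request so it can be completed later (see takeRequestForKeyURI()).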
1803     if (scheme == "skd") {
1804         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1805         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1806         String keyURI = keyURIRef.get();
1807
1808         // Create an initData with the following layout:
1809         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1810         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1811         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1812         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
1813         initDataView->set<uint32_t>(0, keyURISize, true);
1814
1815         RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
1816         keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length() / sizeof(unsigned char), 0);
1817
1818         RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
1819         if (!m_owner->player()->keyNeeded(initData.get()))
1820             return false;
1821
1822         m_keyURIToRequestMap.set(keyURI, avRequest);
1823         return true;
1824     }
1825 #endif
1826
1827     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1828     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1829     resourceLoader->startLoading();
1830     return true;
1831 }
1832
1833 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1834 {
1835     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1836     MutexLocker locker(mapLock());
1837     AVFWrapper* self = avfWrapperForCallbackContext(context);
1838     if (!self) {
1839         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1840         return false;
1841     }
1842
1843     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1844
1845     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1846
1847     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1848
1849     return true;
1850 }
1851 #endif
1852
1853 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1854 {
1855     if (asset == avAsset())
1856         return;
1857
1858     AVCFAssetCancelLoading(avAsset());
1859     m_avAsset = asset;
1860 }
1861
1862 PlatformLayer* AVFWrapper::platformLayer()
1863 {
1864     if (m_videoLayerWrapper)
1865         return m_videoLayerWrapper->platformLayer();
1866
1867     if (!videoLayer())
1868         return 0;
1869
1870     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1871     m_layerClient = adoptPtr(new LayerClient(this));
1872     if (!m_layerClient)
1873         return 0;
1874
1875     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1876     if (!m_videoLayerWrapper)
1877         return 0;
1878
1879     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1880
1881     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1882     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1883     m_videoLayerWrapper->setNeedsLayout();
1884     updateVideoLayerGravity();
1885
1886     return m_videoLayerWrapper->platformLayer();
1887 }
1888
1889 void AVFWrapper::createAVCFVideoLayer()
1890 {
1891     ASSERT(isMainThread());
1892     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1893     if (!avPlayer() || m_avCFVideoLayer)
1894         return;
1895
1896     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1897     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1898     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1899 }
1900
1901 void AVFWrapper::destroyVideoLayer()
1902 {
1903     ASSERT(isMainThread());
1904     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1905     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1906     m_layerClient = nullptr;
1907     m_caVideoLayer = 0;
1908     m_videoLayerWrapper = 0;
1909     if (!m_avCFVideoLayer.get())
1910         return;
1911
1912     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), 0);
1913     m_avCFVideoLayer = 0;
1914 }
1915
1916 void AVFWrapper::setVideoLayerNeedsCommit()
1917 {
1918     if (m_videoLayerWrapper)
1919         m_videoLayerWrapper->setNeedsCommit();
1920 }
1921
1922 void AVFWrapper::setVideoLayerHidden(bool value)
1923 {
1924     if (m_videoLayerWrapper)
1925         m_videoLayerWrapper->setHidden(value);
1926 }
1927
1928 void AVFWrapper::createImageGenerator()
1929 {
1930     ASSERT(isMainThread());
1931     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1932     if (!avAsset() || m_imageGenerator)
1933         return;
1934
1935     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
1936
1937     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
1938     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
1939     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
1940     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
1941
1942     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
1943 }
1944
1945 void AVFWrapper::destroyImageGenerator()
1946 {
1947     ASSERT(isMainThread());
1948     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1949     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
1950     m_imageGenerator = 0;
1951 }
1952
1953 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const IntRect& rect)
1954 {
1955     if (!m_imageGenerator)
1956         return 0;
1957
1958 #if !LOG_DISABLED
1959     double start = monotonicallyIncreasingTime();
1960 #endif
1961
1962     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
1963     RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), toCMTime(time), 0, 0));
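         // Copy the image into a device RGB color space, presumably so callers can draw it without a further color conversion.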
1964     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawimage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
1965
1966 #if !LOG_DISABLED
1967     double duration = monotonicallyIncreasingTime() - start;
1968     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
1969 #endif
1970
1971     return image;
1972 }
1973
1974 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1975 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
1976 {
1977     if (!avAsset())
1978         return 0;
1979
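         // Only query the selection group once the media-characteristics property has finished loading;
         // that is what makes this accessor 'safe'.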
1980     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
1981         return 0;
1982
1983     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
1984 }
1985 #endif
1986
1987 void AVFWrapper::updateVideoLayerGravity()
1988 {
1989     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
1990     // FIXME: <rdar://problem/14884340>
1991 }
1992
1993 #if ENABLE(ENCRYPTED_MEDIA_V2)
1994 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
1995 {
1996     return m_keyURIToRequestMap.take(keyURI);
1997 }
1998 #endif
1999
2000 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
2001 {
2002     ASSERT(m_parent);
2003     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
2004
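         // Size and position the video layer to fill the wrapper layer, honoring the video layer's anchor point.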
2005     CGRect bounds = wrapperLayer->bounds();
2006     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
2007     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 
2008
2009     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
2010     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
2011
2012     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2013 }
2014
2015 } // namespace WebCore
2016
2017 #else
2018 // AVFoundation should always be enabled for Apple production builds.
2019 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2020 #error AVFoundation is not enabled!
2021 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2022 #endif // USE(AVFOUNDATION)
2023 #endif // PLATFORM(WIN) && ENABLE(VIDEO)