Unreviewed, roll out http://trac.webkit.org/changeset/187972.
Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if PLATFORM(WIN) && ENABLE(VIDEO) 
29
30 #if USE(AVFOUNDATION)
31
32 #include "MediaPlayerPrivateAVFoundationCF.h"
33
34 #include "ApplicationCacheResource.h"
35 #include "CDMSessionAVFoundationCF.h"
36 #include "COMPtr.h"
37 #include "FloatConversion.h"
38 #include "GraphicsContext.h"
39 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
40 #include "InbandTextTrackPrivateAVCF.h"
41 #else
42 #include "InbandTextTrackPrivateLegacyAVCF.h"
43 #endif
44 #include "MediaTimeAVFoundation.h"
45 #include "URL.h"
46 #include "Logging.h"
47 #include "PlatformCALayerWin.h"
48 #include "TimeRanges.h"
49 #include "WebCoreAVCFResourceLoader.h"
50
51 #include <AVFoundationCF/AVCFPlayerItem.h>
52 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
53 #include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
54 #endif
55 #include <AVFoundationCF/AVCFPlayerLayer.h>
56 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
57 #include <AVFoundationCF/AVCFAssetResourceLoader.h>
58 #endif
59 #include <AVFoundationCF/AVFoundationCF.h>
60 #include <d3d9.h>
61 #include <delayimp.h>
62 #include <dispatch/dispatch.h>
63 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
64 #include <runtime/DataView.h>
65 #include <runtime/Uint16Array.h>
66 #endif
67 #include <wtf/HashMap.h>
68 #include <wtf/NeverDestroyed.h>
69 #include <wtf/Threading.h>
70 #include <wtf/text/CString.h>
71 #include <wtf/text/StringView.h>
72 #include <wtf/StringPrintStream.h>
73
74 // Soft-linking headers must be included last since they #define functions, constants, etc.
75 #include "AVFoundationCFSoftLinking.h"
76 #include "CoreMediaSoftLink.h"
77
78 // We don't bother softlinking against libdispatch since it's already been loaded by AAS.
79 #ifdef DEBUG_ALL
80 #pragma comment(lib, "libdispatch_debug.lib")
81 #else
82 #pragma comment(lib, "libdispatch.lib")
83 #endif
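// Because libdispatch is guaranteed to be present at runtime (it has already been loaded by
// AAS, per the comment above), it is linked directly via #pragma comment(lib, ...) rather
// than soft-linked the way the AVCF and CoreMedia frameworks are.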
84
85 using namespace std;
86
87 namespace WebCore {
88
89 class LayerClient;
90
91 class AVFWrapper {
92 public:
93     AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
94     ~AVFWrapper();
95
96     void scheduleDisconnectAndDelete();
97
98     void createAVCFVideoLayer();
99     void destroyVideoLayer();
100     PlatformLayer* platformLayer();
101
102     CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
103     PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
104     void setVideoLayerNeedsCommit();
105     void setVideoLayerHidden(bool);
106
107     void createImageGenerator();
108     void destroyImageGenerator();
109     RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const FloatRect&);
110
111     void createAssetForURL(const String& url, bool inheritURI);
112     void setAsset(AVCFURLAssetRef);
113     
114     void createPlayer(IDirect3DDevice9*);
115     void createPlayerItem();
116     
117     void checkPlayability();
118     void beginLoadingMetadata();
119     
120     void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
121     void updateVideoLayerGravity();
122
123     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
124     InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }
125
126 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
127     static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
128     static void processCue(void* context);
129 #endif
130 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
131     static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
132 #endif
133     static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
134     static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
135     static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
136     static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
137     static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
138     static void processNotification(void* context);
139
140     inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
141     inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
142     inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
143     inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
144     inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
145     inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
146 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
147     inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
148     AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
149 #endif
150     inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }
151
152 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
153     RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
154     void setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest);
155 #endif
156
157 private:
158     inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }
159
160     static Mutex& mapLock();
161     static HashMap<uintptr_t, AVFWrapper*>& map();
162     static AVFWrapper* avfWrapperForCallbackContext(void*);
163     void addToMap();
164     void removeFromMap() const;
165 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
166     bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
167     static void processShouldWaitForLoadingOfResource(void* context);
168 #endif
169
170     static void disconnectAndDeleteAVFWrapper(void*);
171
172     static uintptr_t s_nextAVFWrapperObjectID;
173     uintptr_t m_objectID;
174
175     MediaPlayerPrivateAVFoundationCF* m_owner;
176
177     RetainPtr<AVCFPlayerRef> m_avPlayer;
178     RetainPtr<AVCFURLAssetRef> m_avAsset;
179     RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
180     RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
181     RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
182     RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
183 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
184     RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
185     RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
186 #endif
187
188     dispatch_queue_t m_notificationQueue;
189
190     mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
191     RefPtr<PlatformCALayer> m_videoLayerWrapper;
192
193     std::unique_ptr<LayerClient> m_layerClient;
194     COMPtr<IDirect3DDevice9Ex> m_d3dDevice;
195
196     InbandTextTrackPrivateAVF* m_currentTextTrack;
197
198 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
199     HashMap<String, Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>>> m_keyURIToRequestMap;
200     AVCFAssetResourceLoaderCallbacks m_resourceLoaderCallbacks;
201 #endif
202 };
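// Note on the callback design: AVCF callbacks registered by AVFWrapper use callbackContext()
// -- the wrapper's numeric object ID cast to a void* -- rather than a raw `this` pointer.
// Each static callback maps that ID back to a live wrapper via avfWrapperForCallbackContext()
// under mapLock(), so a callback that races with scheduleDisconnectAndDelete() can simply
// find no map entry and bail out instead of touching a destroyed object.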
203
204 uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;
205
206 class LayerClient : public PlatformCALayerClient {
207 public:
208     LayerClient(AVFWrapper* parent) : m_parent(parent) { }
209     virtual ~LayerClient() { m_parent = 0; }
210
211 private:
212     virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
213     virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }
214
215     virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
216     virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
217     virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&) { }
218     virtual bool platformCALayerShowDebugBorders() const { return false; }
219     virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
220     virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }
221
222     virtual bool platformCALayerContentsOpaque() const { return false; }
223     virtual bool platformCALayerDrawsContent() const { return false; }
224     virtual float platformCALayerDeviceScaleFactor() const { return 1; }
225
226     AVFWrapper* m_parent;
227 };
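// LayerClient receives PlatformCALayer callbacks on behalf of its AVFWrapper. It opts into
// layout notifications (platformCALayerRespondsToLayoutChanges() returns true) so
// platformCALayerLayoutSublayersOfLayer() runs when the wrapper layer is laid out, and it
// declines content painting, debug borders, and repaint counters because the layer's pixels
// come from the AVCF player layer rather than from WebCore drawing.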
228
229 #if !LOG_DISABLED
230 static const char* boolString(bool val)
231 {
232     return val ? "true" : "false";
233 }
234 #endif
235
236 static CFArrayRef createMetadataKeyNames()
237 {
238     static const CFStringRef keyNames[] = {
239         AVCFAssetPropertyDuration,
240         AVCFAssetPropertyNaturalSize,
241         AVCFAssetPropertyPreferredTransform,
242         AVCFAssetPropertyPreferredRate,
243         AVCFAssetPropertyPlayable,
244         AVCFAssetPropertyTracks,
245 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
246         AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
247 #endif
248     };
249     
250     return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
251 }
252
253 static CFArrayRef metadataKeyNames()
254 {
255     static CFArrayRef keys = createMetadataKeyNames();
256     return keys;
257 }
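// assetStatus() below asks AVCFAssetGetStatusOfValueForProperty() about each of these keys,
// so the asset is not reported as loaded until every listed property has either resolved or
// failed in one of the explicitly tolerated ways.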
258
259 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
260 static CFStringRef CMTimeRangeStartKey()
261 {
262     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start")));
263     return key;
264 }
265
266 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
267 static CFStringRef CMTimeRangeDurationKey()
268 {
269     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration")));
270     return key;
271 }
272
273 // FIXME: It would be better if AVCF exported this notification name.
274 static CFStringRef CACFContextNeedsFlushNotification()
275 {
276     DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification")));
277     return name;
278 }
279
280 // Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
281 // to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
282 inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
283 {
284     return wrapper ? wrapper->videoLayer() : 0; 
285 }
286
287 inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
288 {
289     return wrapper ? wrapper->avPlayer() : 0; 
290 }
291
292 inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
293 {
294     return wrapper ? wrapper->avAsset() : 0; 
295 }
296
297 inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
298 {
299     return wrapper ? wrapper->avPlayerItem() : 0; 
300 }
301
302 inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
303 {
304     return wrapper ? wrapper->imageGenerator() : 0; 
305 }
306
307 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
308 inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
309 {
310     return wrapper ? wrapper->legibleOutput() : 0;
311 }
312
313 inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
314 {
315     return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
316 }
317 #endif
318
319 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
320 static dispatch_queue_t globalQueue = nullptr;
321
322 static void initGlobalLoaderDelegateQueue(void* ctx)
323 {
324     globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
325 }
326
327 static dispatch_queue_t globalLoaderDelegateQueue()
328 {
329     static dispatch_once_t onceToken;
330
331     dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);
332
333     return globalQueue;
334 }
335 #endif
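// The loader-delegate queue is created lazily and shared by every player in the process;
// dispatch_once_f() guarantees initGlobalLoaderDelegateQueue() runs exactly once even if
// several threads request the queue concurrently.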
336
337 void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
338 {
339     if (isAvailable())
340         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationCF>(player); },
341             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
342 }
343
344 MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
345     : MediaPlayerPrivateAVFoundation(player)
346     , m_avfWrapper(0)
347     , m_videoFrameHasDrawn(false)
348 {
349     LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
350 }
351
352 MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
353 {
354     LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
355 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
356     for (auto& pair : m_resourceLoaderMap)
357         pair.value->invalidate();
358 #endif
359     cancelLoad();
360 }
361
362 void MediaPlayerPrivateAVFoundationCF::cancelLoad()
363 {
364     LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);
365
366     // Do nothing when our cancellation of pending loading calls its completion handler
367     setDelayCallbacks(true);
368     setIgnoreLoadStateChanges(true);
369
370     tearDownVideoRendering();
371
372     clearTextTracks();
373
374     if (m_avfWrapper) {
375         // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to 
376         // disconnectAndDeleteAVFWrapper on that queue. 
377         m_avfWrapper->scheduleDisconnectAndDelete();
378         m_avfWrapper = 0;
379     }
380
381     setIgnoreLoadStateChanges(false);
382     setDelayCallbacks(false);
383 }
384
385 void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
386 {
387     ASSERT(supportsAcceleratedRendering());
388
389     if (m_avfWrapper)
390         m_avfWrapper->updateVideoLayerGravity();
391 }
392
393 bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
394 {
395     return videoLayer(m_avfWrapper);
396 }
397
398 bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
399 {
400     return imageGenerator(m_avfWrapper);
401 }
402
403 void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
404 {
405     LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);
406     ASSERT(isMainThread());
407
408     if (imageGenerator(m_avfWrapper))
409         return;
410
411     if (m_avfWrapper)
412         m_avfWrapper->createImageGenerator();
413 }
414
415 void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
416 {
417     ASSERT(isMainThread());
418     if (m_avfWrapper)
419         m_avfWrapper->destroyImageGenerator();
420 }
421
422 void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
423 {
424     ASSERT(isMainThread());
425     ASSERT(supportsAcceleratedRendering());
426
427     if (m_avfWrapper)
428         m_avfWrapper->createAVCFVideoLayer();
429 }
430
431 void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
432 {
433     ASSERT(isMainThread());
434     LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
435     if (m_avfWrapper)
436         m_avfWrapper->destroyVideoLayer();
437 }
438
439 bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
440 {
441     return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
442 }
443
444 void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
445 {
446     if (m_avfWrapper)
447         m_avfWrapper->setCurrentTextTrack(track);
448 }
449
450 InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
451 {
452     if (m_avfWrapper)
453         return m_avfWrapper->currentTextTrack();
454
455     return 0;
456 }
457
458 void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
459 {
460     ASSERT(!m_avfWrapper);
461
462     setDelayCallbacks(true);
463
464     bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");
465
466     m_avfWrapper = new AVFWrapper(this);
467     m_avfWrapper->createAssetForURL(url, inheritURI);
468     setDelayCallbacks(false);
469 }
470
471 void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
472 {
473     ASSERT(isMainThread());
474     ASSERT(m_avfWrapper);
475     
476     setDelayCallbacks(true);
477     m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
478     setDelayCallbacks(false);
479 }
480
481 void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
482 {
483     ASSERT(isMainThread());
484     ASSERT(m_avfWrapper);
485     
486     setDelayCallbacks(true);
487     m_avfWrapper->createPlayerItem();
488
489     setDelayCallbacks(false);
490 }
491
492 void MediaPlayerPrivateAVFoundationCF::checkPlayability()
493 {
494     ASSERT(m_avfWrapper);
495     m_avfWrapper->checkPlayability();
496 }
497
498 void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
499 {
500     ASSERT(m_avfWrapper);
501     m_avfWrapper->beginLoadingMetadata();
502 }
503
504 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
505 {
506     if (!avPlayerItem(m_avfWrapper))
507         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
508
509     AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
510     if (status == AVCFPlayerItemStatusUnknown)
511         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
512     if (status == AVCFPlayerItemStatusFailed)
513         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
514     if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
515         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
516     if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
517         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
518     if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
519         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
520     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
521 }
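// The checks above fall through in order: nonexistent, unknown, and failed states are
// reported first, then the buffering-related states (likely to keep up, buffer full, buffer
// empty), and only an item that matches none of those is reported as ReadyToPlay.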
522
523 PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
524 {
525     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
526     PlatformMedia pm;
527     pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
528     pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
529     return pm;
530 }
531
532 PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
533 {
534     ASSERT(isMainThread());
535     if (!m_avfWrapper)
536         return 0;
537
538     return m_avfWrapper->platformLayer();
539 }
540
541 void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
542 {
543     ASSERT(isMainThread());
544     if (!m_avfWrapper)
545         return;
546     
547     // FIXME: The Mac port uses a CATransaction here; we need to figure out why that was done there and
548     // whether we're affected by the same issue.
549     setDelayCallbacks(true);
550     m_avfWrapper->setVideoLayerHidden(!isVisible);    
551     if (!isVisible)
552         tearDownVideoRendering();
553     setDelayCallbacks(false);
554 }
555
556 void MediaPlayerPrivateAVFoundationCF::platformPlay()
557 {
558     LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
559     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
560         return;
561
562     setDelayCallbacks(true);
563     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
564     setDelayCallbacks(false);
565 }
566
567 void MediaPlayerPrivateAVFoundationCF::platformPause()
568 {
569     LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
570     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
571         return;
572
573     setDelayCallbacks(true);
574     AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
575     setDelayCallbacks(false);
576 }
577
578 MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
579 {
580     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
581         return MediaTime::zeroTime();
582
583     CMTime cmDuration;
584
585     // Check the AVItem if we have one and it has loaded duration; some assets never report duration.
586     if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
587         cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
588     else
589         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
590
591     if (CMTIME_IS_NUMERIC(cmDuration))
592         return toMediaTime(cmDuration);
593
594     if (CMTIME_IS_INDEFINITE(cmDuration))
595         return MediaTime::positiveInfiniteTime();
596
597     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
598     return MediaTime::invalidTime();
599 }
600
601 MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
602 {
603     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
604         return MediaTime::zeroTime();
605
606     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
607     if (CMTIME_IS_NUMERIC(itemTime))
608         return max(toMediaTime(itemTime), MediaTime::zeroTime());
609
610     return MediaTime::zeroTime();
611 }
612
613 void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
614 {
615     if (!m_avfWrapper)
616         return;
617     
618     // seekToTime generates several event callbacks, update afterwards.
619     setDelayCallbacks(true);
620     m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
621     setDelayCallbacks(false);
622 }
623
624 void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
625 {
626     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
627         return;
628
629     AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
630 }
631
632 void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
633 {
634     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
635         return;
636
637     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
638     AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
639 }
640
641 void MediaPlayerPrivateAVFoundationCF::setRate(float rate)
642 {
643     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setRate(%p) - rate: %f", this, rate);
644     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
645         return;
646
647     setDelayCallbacks(true);
648     AVCFPlayerSetRate(avPlayer(m_avfWrapper), rate);
649     setDelayCallbacks(false);
650 }
651
652 double MediaPlayerPrivateAVFoundationCF::rate() const
653 {
654     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
655         return 0;
656
657     setDelayCallbacks(true);
658     double currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
659     setDelayCallbacks(false);
660
661     return currentRate;
662 }
663
664 static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
665 {
666     // Is the range valid?
667     if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
668         return false;
669
670     if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
671         return false;
672
673     return true;
674 }
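// The buffered and seekable range accessors below use this to skip dictionary entries whose
// CMTime start or duration is invalid, carries a non-zero epoch, is negative, or is zero
// length.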
675
676 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
677 {
678     auto timeRanges = std::make_unique<PlatformTimeRanges>();
679
680     if (!avPlayerItem(m_avfWrapper))
681         return timeRanges;
682
683     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
684     if (!loadedRanges)
685         return timeRanges;
686
687     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
688     for (CFIndex i = 0; i < rangeCount; i++) {
689         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
690         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
691         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
692         
693         if (timeRangeIsValidAndNotEmpty(start, duration)) {
694             MediaTime rangeStart = toMediaTime(start);
695             MediaTime rangeEnd = rangeStart + toMediaTime(duration);
696             timeRanges->add(rangeStart, rangeEnd);
697         }
698     }
699
700     return timeRanges;
701 }
702
703 MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
704 {
705     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
706     if (!seekableRanges) 
707         return MediaTime::zeroTime(); 
708
709     MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
710     bool hasValidRange = false; 
711     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
712     for (CFIndex i = 0; i < rangeCount; i++) {
713         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
714         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
715         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
716         if (!timeRangeIsValidAndNotEmpty(start, duration))
717             continue;
718
719         hasValidRange = true; 
720         MediaTime startOfRange = toMediaTime(start); 
721         if (minTimeSeekable > startOfRange) 
722             minTimeSeekable = startOfRange; 
723     } 
724     return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
725 }
726
727 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
728 {
729     if (!avPlayerItem(m_avfWrapper))
730         return MediaTime::zeroTime();
731
732     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
733     if (!seekableRanges)
734         return MediaTime::zeroTime();
735
736     MediaTime maxTimeSeekable;
737     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
738     for (CFIndex i = 0; i < rangeCount; i++) {
739         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
740         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
741         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
742         if (!timeRangeIsValidAndNotEmpty(start, duration))
743             continue;
744         
745         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
746         if (maxTimeSeekable < endOfRange)
747             maxTimeSeekable = endOfRange;
748     }
749
750     return maxTimeSeekable;   
751 }
752
753 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
754 {
755     if (!avPlayerItem(m_avfWrapper))
756         return MediaTime::zeroTime();
757
758     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
759     if (!loadedRanges)
760         return MediaTime::zeroTime();
761
762     MediaTime maxTimeLoaded;
763     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
764     for (CFIndex i = 0; i < rangeCount; i++) {
765         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
766         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
767         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
768         if (!timeRangeIsValidAndNotEmpty(start, duration))
769             continue;
770         
771         MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
772         if (maxTimeLoaded < endOfRange)
773             maxTimeLoaded = endOfRange;
774     }
775
776     return maxTimeLoaded;   
777 }
778
779 unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
780 {
781     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
782         return 0;
783
784     int64_t totalMediaSize = 0;
785     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
786     CFIndex trackCount = CFArrayGetCount(tracks.get());
787     for (CFIndex i = 0; i < trackCount; i++) {
788         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
789         totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
790     }
791
792     return static_cast<unsigned long long>(totalMediaSize);
793 }
794
795 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
796 {
797     if (!avAsset(m_avfWrapper))
798         return MediaPlayerAVAssetStatusDoesNotExist;
799
800     // First, make sure all metadata properties we rely on are loaded.
801     CFArrayRef keys = metadataKeyNames();
802     CFIndex keyCount = CFArrayGetCount(keys);
803     for (CFIndex i = 0; i < keyCount; i++) {
804         CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
805         AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);
806
807         if (keyStatus < AVCFPropertyValueStatusLoaded)
808             return MediaPlayerAVAssetStatusLoading;
809         if (keyStatus == AVCFPropertyValueStatusFailed) {
810             if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
811                 // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
812                 // <rdar://problem/15966685>
813                 continue;
814             }
815 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
816             if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
817                 // On Windows, the media selection options are not available when initially interacting with a streaming source.
818                 // <rdar://problem/16160699>
819                 continue;
820             }
821 #endif
822             return MediaPlayerAVAssetStatusFailed;
823         }
824         if (keyStatus == AVCFPropertyValueStatusCancelled)
825             return MediaPlayerAVAssetStatusCancelled;
826     }
827
828     if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
829         return MediaPlayerAVAssetStatusPlayable;
830
831     return MediaPlayerAVAssetStatusLoaded;
832 }
833
834 void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
835 {
836     ASSERT(isMainThread());
837     if (!metaDataAvailable() || context->paintingDisabled())
838         return;
839
840     if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
841         // We're being told to render into a context, but we already have the
842         // video layer, which probably means we've been called from <canvas>.
843         createContextVideoRenderer();
844     }
845
846     paint(context, rect);
847 }
848
849 void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const FloatRect& rect)
850 {
851     ASSERT(isMainThread());
852     if (!metaDataAvailable() || context->paintingDisabled() || !imageGenerator(m_avfWrapper))
853         return;
854
855     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
856
857     setDelayCallbacks(true);
858     RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
859     if (image) {
860         context->save();
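        // Flip the destination: CGContextDrawImage() draws with a bottom-left origin, while
        // this GraphicsContext uses a top-left origin, so translate to the bottom of the rect
        // and scale y by -1 before drawing the frame.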
861         context->translate(rect.x(), rect.y() + rect.height());
862         context->scale(FloatSize(1.0f, -1.0f));
863         context->setImageInterpolationQuality(InterpolationLow);
864         FloatRect paintRect(FloatPoint(), rect.size());
865         CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
866         context->restore();
867         image = 0;
868     }
869     setDelayCallbacks(false);
870     
871     m_videoFrameHasDrawn = true;
872 }
873
874 static const HashSet<String>& mimeTypeCache()
875 {
876     static NeverDestroyed<HashSet<String>> cache;
877     static bool typeListInitialized = false;
878
879     if (typeListInitialized)
880         return cache;
881     typeListInitialized = true;
882
883     RetainPtr<CFArrayRef> supportedTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
884     
885     ASSERT(supportedTypes);
886     if (!supportedTypes)
887         return cache;
888
889     CFIndex typeCount = CFArrayGetCount(supportedTypes.get());
890     for (CFIndex i = 0; i < typeCount; i++)
891         cache.get().add(static_cast<CFStringRef>(CFArrayGetValueAtIndex(supportedTypes.get(), i)));
892
893     return cache;
894 }
895
896 void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String>& supportedTypes)
897 {
898     supportedTypes = mimeTypeCache();
899 }
900
901 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
902 static bool keySystemIsSupported(const String& keySystem)
903 {
904     if (equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0"))
905         return true;
906     return false;
907 }
908 #endif
909
910 static const HashSet<String>& avfMIMETypes()
911 {
912     static NeverDestroyed<HashSet<String>> cache = []() {
913         HashSet<String> types;
914         RetainPtr<CFArrayRef> avTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
915
916         CFIndex typeCount = CFArrayGetCount(avTypes.get());
917         for (CFIndex i = 0; i < typeCount; ++i) {
918             String mimeType = (CFStringRef)(CFArrayGetValueAtIndex(avTypes.get(), i));
919             types.add(mimeType.lower());
920         }
921
922         return types;
923     }();
924
925     return cache;
926 }
927
928 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
929 {
930     if (isUnsupportedMIMEType(parameters.type))
931         return MediaPlayer::IsNotSupported;
932
933     if (!staticMIMETypeList().contains(parameters.type) && !avfMIMETypes().contains(parameters.type))
934         return MediaPlayer::IsNotSupported;
935
936 #if HAVE(AVCFURL_PLAYABLE_MIMETYPE)
937     // The spec says:
938     // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
939     if (parameters.codecs.isEmpty())
940         return MediaPlayer::MayBeSupported;
941
942     String typeString = parameters.type + "; codecs=\"" + parameters.codecs + "\"";
943     return AVCFURLAssetIsPlayableExtendedMIMEType(typeString.createCFString().get()) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
944 #else
945     if (mimeTypeCache().contains(parameters.type))
946         return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
947     return MediaPlayer::IsNotSupported;
948 #endif
949 }
950
951 bool MediaPlayerPrivateAVFoundationCF::supportsKeySystem(const String& keySystem, const String& mimeType)
952 {
953 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
954     if (keySystem.isEmpty())
955         return false;
956
957     if (!keySystemIsSupported(keySystem))
958         return false;
959
960     if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
961         return false;
962
963     return true;
964 #else
965     UNUSED_PARAM(keySystem);
966     UNUSED_PARAM(mimeType);
967     return false;
968 #endif
969 }
970
971 bool MediaPlayerPrivateAVFoundationCF::isAvailable()
972 {
973     return AVFoundationCFLibrary() && isCoreMediaFrameworkAvailable();
974 }
975
976 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
977 void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
978 {
979     WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);
980
981     if (resourceLoader)
982         resourceLoader->stopLoading();
983 }
984
985 void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
986 {
987     m_resourceLoaderMap.remove(avRequest);
988 }
989 #endif
990
991 MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
992 {
993     if (!metaDataAvailable())
994         return timeValue;
995
996     // FIXME - can not implement until rdar://8721669 is fixed.
997     return timeValue;
998 }
999
1000 void MediaPlayerPrivateAVFoundationCF::tracksChanged()
1001 {
1002     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
1003     m_languageOfPrimaryAudioTrack = String();
1004
1005     if (!avAsset(m_avfWrapper))
1006         return;
1007
1008     setDelayCharacteristicsChangedNotification(true);
1009
1010     bool haveCCTrack = false;
1011     bool hasCaptions = false;
1012
1013     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
1014     // asked about those fairly frequently.
1015     if (!avPlayerItem(m_avfWrapper)) {
1016         // We don't have a player item yet, so check with the asset because some assets support inspection
1017         // prior to becoming ready to play.
1018         RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1019         setHasVideo(CFArrayGetCount(visualTracks.get()));
1020
1021         RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
1022         setHasAudio(CFArrayGetCount(audioTracks.get()));
1023
1024 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1025         RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
1026         hasCaptions = CFArrayGetCount(captionTracks.get());
1027 #endif
1028     } else {
1029         bool hasVideo = false;
1030         bool hasAudio = false;
1031
1032         RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1033
1034         CFIndex trackCount = CFArrayGetCount(tracks.get());
1035         for (CFIndex i = 0; i < trackCount; i++) {
1036             AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1037             
1038             if (AVCFPlayerItemTrackIsEnabled(track)) {
1039                 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
1040                 if (!assetTrack) {
1041                     // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1042                     LOG(Media, "MediaPlayerPrivateAVFoundationCF::tracksChanged(%p) - track = %p is enabled, but has no asset track.", this, track);
1043                     continue;
1044                 }
1045                 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1046                 if (!mediaType)
1047                     continue;
1048                 
1049                 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1050                     hasVideo = true;
1051                 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1052                     hasAudio = true;
1053                 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
1054 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1055                     hasCaptions = true;
1056 #endif
1057                     haveCCTrack = true;
1058                 }
1059             }
1060         }
1061
1062         setHasVideo(hasVideo);
1063         setHasAudio(hasAudio);
1064     }
1065
1066 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1067     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1068     if (legibleGroup) {
1069         RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1070         hasCaptions = CFArrayGetCount(playableOptions.get());
1071         if (hasCaptions)
1072             processMediaSelectionOptions();
1073     }
1074 #endif
1075
1076 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1077     if (haveCCTrack)
1078         processLegacyClosedCaptionsTracks();
1079 #endif
1080
1081     setHasClosedCaptions(hasCaptions);
1082
1083     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 
1084         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1085
1086     sizeChanged();
1087
1088     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1089         characteristicsChanged();
1090
1091     setDelayCharacteristicsChangedNotification(false);
1092 }
1093
1094 void MediaPlayerPrivateAVFoundationCF::sizeChanged()
1095 {
1096     ASSERT(isMainThread());
1097     if (!avAsset(m_avfWrapper))
1098         return;
1099     
1100     // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
1101     // the union of all visual track rects.
1102     CGRect trackRectUnion = CGRectZero;
1103     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1104     CFIndex trackCount = CFArrayGetCount(tracks.get());
1105     for (CFIndex i = 0; i < trackCount; i++) {
1106         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1107         
1108         CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack);
1109         CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
1110         trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack)));
1111     }
1112     // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
1113     trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y);
1114     CGSize naturalSize = trackRectUnion.size;
1115
1116     if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper))
1117         naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper));
1118
1119     // Also look at the asset's preferred transform so we account for a movie matrix.
1120     CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper)));
1121     if (movieSize.width > naturalSize.width)
1122         naturalSize.width = movieSize.width;
1123     if (movieSize.height > naturalSize.height)
1124         naturalSize.height = movieSize.height;
1125     setNaturalSize(IntSize(naturalSize));
1126 }
1127
1128 bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const
1129 {
1130     // The AVFoundationCF player needs to have the root compositor available at construction time
1131     // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode.
1132     //
1133     // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode
1134     // when asked, then we could follow AVFoundation's model and switch to compositing
1135     // mode when beginning to play media.
1136     return true;
1137 }
1138
1139 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
1140 RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI)
1141 {
1142     if (!m_avfWrapper)
1143         return nullptr;
1144
1145     return m_avfWrapper->takeRequestForKeyURI(keyURI);
1146 }
1147
1148 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem)
1149 {
1150     if (!keySystemIsSupported(keySystem))
1151         return nullptr;
1152
1153     return std::make_unique<CDMSessionAVFoundationCF>(this);
1154 }
1155 #elif ENABLE(ENCRYPTED_MEDIA_V2)
1156 std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem)
1157 {
1158     return nullptr;
1159 }
1160 #endif
1161
1162 long MediaPlayerPrivateAVFoundationCF::assetErrorCode() const
1163 {
1164     if (!avAsset(m_avfWrapper))
1165         return 0;
1166
1167     CFErrorRef error = nullptr;
1168     AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), AVCFAssetPropertyPlayable, &error);
1169     if (!error)
1170         return 0;
1171
1172     long code = CFErrorGetCode(error);
1173     CFRelease(error);
1174     return code;
1175 }
1176
1177 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1178 void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks()
1179 {
1180 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1181     AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper));
1182 #endif
1183
1184     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1185     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1186     CFIndex trackCount = CFArrayGetCount(tracks.get());
1187     for (CFIndex i = 0; i < trackCount; ++i) {
1188         AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1189
1190         RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack));
1191         if (!assetTrack) {
1192             // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1193             LOG(Media, "MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks(%p) - track %ld has no asset track.", this, static_cast<long>(i));
1194             continue;
1195         }
1196         CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1197         if (!mediaType)
1198             continue;
1199                 
1200         if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo)
1201             continue;
1202
1203         bool newCCTrack = true;
1204         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1205             if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
1206                 continue;
1207
1208             RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get());
1209             if (track->avPlayerItemTrack() == playerItemTrack) {
1210                 removedTextTracks.remove(i - 1);
1211                 newCCTrack = false;
1212                 break;
1213             }
1214         }
1215
1216         if (!newCCTrack)
1217             continue;
1218         
1219         m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack));
1220     }
1221
1222     processNewAndRemovedTextTracks(removedTextTracks);
1223 }
1224 #endif
1225
1226 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1227 void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions()
1228 {
1229     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1230     if (!legibleGroup) {
1231         LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
1232         return;
1233     }
1234
1235     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
1236     // but set the selected legible track to nil so text tracks will not be automatically configured.
1237     if (!m_textTracks.size() && AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)) {
1238         if (AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper))
1239             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup);
1240     }
1241
1242     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1243     RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1244     CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get());
1245     for (CFIndex i = 0; i < legibleOptionsCount; ++i) {
1246         AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i));
1247         bool newTrack = true;
1248         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1249             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1250                 continue;
1251
1252             RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get());
1253             if (CFEqual(track->mediaSelectionOption(), option)) {
1254                 removedTextTracks.remove(i - 1);
1255                 newTrack = false;
1256                 break;
1257             }
1258         }
1259         if (!newTrack)
1260             continue;
1261
1262         m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic));
1263     }
1264
1265     processNewAndRemovedTextTracks(removedTextTracks);
1266 }
1267
1268 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1269
1270 void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
1271 {
1272     if (m_currentTextTrack == track)
1273         return;
1274
1275     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
1276         
1277     m_currentTextTrack = track;
1278
1279     if (track) {
1280         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1281             AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE);
1282 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1283         else
1284             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia());
1285 #endif
1286     } else {
1287 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1288         AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia());
1289 #endif
1290         AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE);
1291     }
1292 }
1293
1294 String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const
1295 {
1296     if (!m_languageOfPrimaryAudioTrack.isNull())
1297         return m_languageOfPrimaryAudioTrack;
1298
1299     if (!avPlayerItem(m_avfWrapper))
1300         return emptyString();
1301
1302 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1303     // If AVFoundation has an audible group, return the language of the currently selected audible option.
1304     AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible);
1305     AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup);
1306     if (currentlySelectedAudibleOption) {
1307         RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption));
1308         if (audibleOptionLocale)
1309             m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get());
1310         else
1311             m_languageOfPrimaryAudioTrack = emptyString();
1312
1313         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1314
1315         return m_languageOfPrimaryAudioTrack;
1316     }
1317 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1318
1319     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
1320     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
1321     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio));
1322     CFIndex trackCount = CFArrayGetCount(tracks.get());
1323     if (!tracks || trackCount != 1) {
1324         m_languageOfPrimaryAudioTrack = emptyString();
1325         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? trackCount : 0));
1326         return m_languageOfPrimaryAudioTrack;
1327     }
1328
1329     AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0);
1330     RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track));
1331
1332     // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full
1333     // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the
1334     // ISO 639-2/T language code so check it.
1335     if (!language)
1336         language = adoptCF(AVCFAssetTrackCopyLanguageCode(track));
1337
1338     // Some legacy tracks have "und" as a language, treat that the same as no language at all.
1339     if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) {
1340         m_languageOfPrimaryAudioTrack = language.get();
1341         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1342         return m_languageOfPrimaryAudioTrack;
1343     }
1344
1345     LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
1346     m_languageOfPrimaryAudioTrack = emptyString();
1347     return m_languageOfPrimaryAudioTrack;
1348 }
1349
1350 void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay()
1351 {
1352     if (m_avfWrapper)
1353         m_avfWrapper->setVideoLayerNeedsCommit();
1354 }
1355
1356 AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner)
1357     : m_owner(owner)
1358     , m_objectID(s_nextAVFWrapperObjectID++)
1359     , m_currentTextTrack(0)
1360 {
1361     ASSERT(isMainThread());
1362     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1363     LOG(Media, "AVFWrapper::AVFWrapper(%p)", this);
1364
1365     m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0);
1366
1367 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1368     m_resourceLoaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1;
1369     m_resourceLoaderCallbacks.context = nullptr;
1370     m_resourceLoaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource;
1371 #endif
1372
1373     addToMap();
1374 }
1375
1376 AVFWrapper::~AVFWrapper()
1377 {
1378     ASSERT(isMainThread());
1379     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1380     LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);
1381
1382     destroyVideoLayer();
1383     destroyImageGenerator();
1384
1385     if (m_notificationQueue)
1386         dispatch_release(m_notificationQueue);
1387
1388     if (avAsset()) {
1389         AVCFAssetCancelLoading(avAsset());
1390         m_avAsset = 0;
1391     }
1392
1393 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1394     if (legibleOutput()) {
1395         if (avPlayerItem())
1396             AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
1397         m_legibleOutput = 0;
1398     }
1399 #endif
1400
1401     m_avPlayerItem = 0;
1402     m_timeObserver = 0;
1403     m_avPlayer = 0;
1404 }
1405
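// Every AVFWrapper is registered in a global map keyed by its object ID, and that ID (cast to a pointer)
// is what we hand to AVFoundationCF as the callback context. Callbacks that arrive after the wrapper has
// been removed from the map simply fail the lookup and are ignored.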
1406 Mutex& AVFWrapper::mapLock()
1407 {
1408     static Mutex mapLock;
1409     return mapLock;
1410 }
1411
1412 HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
1413 {
1414     static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>;
1415     return map;
1416 }
1417
1418 void AVFWrapper::addToMap()
1419 {
1420     MutexLocker locker(mapLock());
1421     
    // HashMap does not allow a key of 0, and we also need to make sure we aren't
    // reusing an object ID that is already in the map.
1424     while (!m_objectID || (map().find(m_objectID) != map().end()))
1425         m_objectID = s_nextAVFWrapperObjectID++;
1426        
1427     LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);
1428
1429     map().add(m_objectID, this);
1430 }
1431
1432 void AVFWrapper::removeFromMap() const
1433 {
1434     LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);
1435
1436     MutexLocker locker(mapLock());
1437     map().remove(m_objectID);
1438 }
1439
1440 AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
1441 {
1442     // Assumes caller has locked mapLock().
1443     HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
1444     if (it == map().end())
1445         return 0;
1446
1447     return it->value;
1448 }
1449
1450 void AVFWrapper::scheduleDisconnectAndDelete()
1451 {
1452     // Ignore any subsequent notifications we might receive in notificationCallback().
1453     removeFromMap();
1454
1455     dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
1456 }
1457
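// Teardown happens in two steps: scheduleDisconnectAndDelete() removes the wrapper from the map and hops to
// the wrapper's dispatch queue, where disconnectAndDeleteAVFWrapper() unregisters every notification and time
// observer; the final release of the AVCF objects is then bounced back to the main thread via destroyAVFWrapper().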
1458 static void destroyAVFWrapper(void* context)
1459 {
1460     ASSERT(isMainThread());
1461     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1462     if (!avfWrapper)
1463         return;
1464
1465     delete avfWrapper;
1466 }
1467
1468 void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
1469 {
1470     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1471
1472     LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);
1473
1474     if (avfWrapper->avPlayerItem()) {
1475         CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1476         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
1477         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
1478         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
1479         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1480         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1481         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
1482         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
1483         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
1484         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
1485         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
1486         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
1487     }
1488
1489     if (avfWrapper->avPlayer()) {
1490         if (avfWrapper->timeObserver())
1491             AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());
1492
1493         CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
1494     }
1495
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (avfWrapper->avPlayerItem() && avfWrapper->legibleOutput())
        AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
#endif
1499
1500     // We must release the AVCFPlayer and other items on the same thread that created them.
1501     dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
1502 }
1503
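// Creates the AVCFURLAsset for the given URL. When the loader delegate is available, the asset's resource
// loader is pointed at resourceLoaderShouldWaitForLoadingOfRequestedResource() so WebCore can service
// custom loading requests (see shouldWaitForLoadingOfResource() below).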
1504 void AVFWrapper::createAssetForURL(const String& url, bool inheritURI)
1505 {
1506     ASSERT(!avAsset());
1507
1508     RetainPtr<CFURLRef> urlRef = URL(ParsedURLString, url).createCFURL();
1509
1510     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1511
1512     if (inheritURI)
1513         CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);
1514
1515     m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue));
1516
1517 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1518     ASSERT(callbackContext());
1519     m_resourceLoaderCallbacks.context = callbackContext();
1520
1521     AVCFAssetResourceLoaderRef resourceLoader = AVCFURLAssetGetResourceLoader(m_avAsset.get());
1522     AVCFAssetResourceLoaderSetCallbacks(resourceLoader, &m_resourceLoaderCallbacks, globalLoaderDelegateQueue());
1523 #endif
1524 }
1525
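// Creates the AVCFPlayer for the already-created player item. If a Direct3D 9Ex device is available it is
// passed to AVFoundationCF to enable hardware-accelerated decoding, and a deliberately infrequent periodic
// time observer is installed so we still hear about time jumps such as seeks.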
1526 void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
1527 {
1528     ASSERT(isMainThread());
1529     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1530     ASSERT(avPlayerItem());
1531
1532     if (avPlayer())
1533         return;
1534
1535     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1536
1537     if (d3dDevice) {
        // QI for an IDirect3DDevice9Ex interface; it is required for hardware video decoding.
1539         COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
1540         m_d3dDevice = d3dEx;
1541     } else
1542         m_d3dDevice = 0;
1543
1544     if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
1545         CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);
1546
1547 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1548     CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
1549 #endif
1550
1551     // FIXME: We need a way to create a AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
1552     AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
1553     m_avPlayer = adoptCF(playerRef);
1554 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1555     AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
1556 #endif
1557
1558     if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
1559         AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());
1560
1561     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1562     ASSERT(center);
1563
1564     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1565
    // Add a time observer. Ask to be called infrequently because we don't really want periodic callbacks,
    // but our observer will also be called whenever a seek happens.
1568     const double veryLongInterval = 60*60*60*24*30;
1569     m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
1570 }
1571
1572 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1573 static RetainPtr<CFArrayRef> createLegibleOutputSubtypes()
1574 {
1575     int webVTTInt = 'wvtt'; // kCMSubtitleFormatType_WebVTT;
1576     RetainPtr<CFNumberRef> webVTTNumber = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &webVTTInt));
1577     CFTypeRef formatTypes[] = { webVTTNumber.get() };
1578     return adoptCF(CFArrayCreate(0, formatTypes, WTF_ARRAY_LENGTH(formatTypes), &kCFTypeArrayCallBacks));
1579 }
1580 #endif
1581
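// Creates the AVCFPlayerItem (which starts media data loading), registers for all of the item notifications
// we translate into WebCore notifications, and, when legible output is supported, attaches an
// AVCFPlayerItemLegibleOutput so in-band WebVTT cues are delivered to legibleOutputCallback().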
1582 void AVFWrapper::createPlayerItem()
1583 {
1584     ASSERT(isMainThread());
1585     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1586     ASSERT(avAsset());
1587
1588     if (avPlayerItem())
1589         return;
1590
1591     // Create the player item so we begin loading media data.
1592     AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
1593     m_avPlayerItem = adoptCF(itemRef);
1594
1595     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1596     ASSERT(center);
1597
1598     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1599     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1600     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1601     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1602     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1603     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1604     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1605     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1606     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1607     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    // FIXME: Are there other legible output notifications we need to register for? The asset and hasEnabledAudio properties are not exposed by AVCF.
1609
1610     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);
1611
1612 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1613     const CFTimeInterval legibleOutputAdvanceInterval = 2;
1614
1615     m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, createLegibleOutputSubtypes().get()));
1616     AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);
1617
1618     AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
1619 #if HAVE(AVCFPLAYERITEM_CALLBACK_VERSION_2)
1620     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_2;
1621 #else
1622     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
1623 #endif
1624     ASSERT(callbackContext());
1625     callbackInfo.context = callbackContext();
1626     callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;
1627
1628     AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
1629     AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
1630     AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
1631     AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
1632 #endif
1633 }
1634
1635 void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
1636 {
1637     MutexLocker locker(mapLock());
1638     AVFWrapper* self = avfWrapperForCallbackContext(context);
1639     if (!self) {
1640         LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1641         return;
1642     }
1643
1644     double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported.
1645     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
1646 }
1647
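// Item and player notifications arrive on the notification queue. NotificationCallbackData carries the
// notification name and callback context to the main thread, where processNotification() maps the
// notification onto the owner's MediaPlayerPrivateAVFoundation notification.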
1648 struct NotificationCallbackData {
1649     RetainPtr<CFStringRef> m_propertyName;
1650     void* m_context;
1651
1652     NotificationCallbackData(CFStringRef propertyName, void* context)
1653         : m_propertyName(propertyName), m_context(context)
1654     {
1655     }
1656 };
1657
1658 void AVFWrapper::processNotification(void* context)
1659 {
1660     ASSERT(isMainThread());
1661     ASSERT(context);
1662
1663     if (!context)
1664         return;
1665
1666     std::unique_ptr<NotificationCallbackData> notificationData { static_cast<NotificationCallbackData*>(context) };
1667
1668     MutexLocker locker(mapLock());
1669     AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
1670     if (!self) {
1671         LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1672         return;
1673     }
1674
1675     CFStringRef propertyName = notificationData->m_propertyName.get();
1676
1677     if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
1678         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1679     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1680         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1681     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
1682         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1683         if (asset)
1684             self->setAsset(asset);
1685         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1686     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1687         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1688     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1689         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1690     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1691         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1692     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1693         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1694     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1695         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1696     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1697         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1698     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1699         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1700     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1701         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1702     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1703         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1704     else
1705         ASSERT_NOT_REACHED();
1706 }
1707
1708 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1709 {
1710 #if !LOG_DISABLED
1711     char notificationName[256];
1712     CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
    LOG(Media, "AVFWrapper::notificationCallback(id=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1714 #endif
1715
1716     auto notificationData = std::make_unique<NotificationCallbackData>(propertyName, observer);
1717
1718     dispatch_async_f(dispatch_get_main_queue(), notificationData.release(), processNotification);
1719 }
1720
1721 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1722 {
1723     MutexLocker locker(mapLock());
1724     AVFWrapper* self = avfWrapperForCallbackContext(context);
1725     if (!self) {
1726         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1727         return;
1728     }
1729
1730     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1731     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1732 }
1733
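// Asynchronously loads the asset's "playable" property; loadPlayableCompletionCallback() tells the owner
// once playability is known.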
1734 void AVFWrapper::checkPlayability()
1735 {
1736     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1737
1738     static CFArrayRef propertyKeyName;
1739     if (!propertyKeyName) {
1740         static const CFStringRef keyNames[] = { 
1741             AVCFAssetPropertyPlayable
1742         };
        propertyKeyName = CFArrayCreate(0, (const void**)keyNames, WTF_ARRAY_LENGTH(keyNames), &kCFTypeArrayCallBacks);
1744     }
1745
1746     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1747 }
1748
1749 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1750 {
1751     MutexLocker locker(mapLock());
1752     AVFWrapper* self = avfWrapperForCallbackContext(context);
1753     if (!self) {
1754         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1755         return;
1756     }
1757
1758     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1759     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1760 }
1761
1762 void AVFWrapper::beginLoadingMetadata()
1763 {
1764     ASSERT(avAsset());
1765     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1766     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1767 }
1768
1769 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1770 {
1771     MutexLocker locker(mapLock());
1772     AVFWrapper* self = avfWrapperForCallbackContext(context);
1773     if (!self) {
1774         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1775         return;
1776     }
1777
1778     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1779     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1780 }
1781
1782 void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
1783 {
1784     ASSERT(avPlayerItem());
1785     CMTime cmTime = toCMTime(time);
1786     CMTime cmBefore = toCMTime(negativeTolerance);
1787     CMTime cmAfter = toCMTime(positiveTolerance);
1788     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1789 }
1790
1791 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
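// Cues are delivered on the legible output's dispatch queue. LegibleOutputData carries the attributed
// strings, native samples, and item time to the main thread, where processCue() forwards them to the
// currently selected in-band text track.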
1792 struct LegibleOutputData {
1793     RetainPtr<CFArrayRef> m_attributedStrings;
1794     RetainPtr<CFArrayRef> m_samples;
1795     MediaTime m_time;
1796     void* m_context;
1797
1798     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
1799         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1800     {
1801     }
1802 };
1803
1804 void AVFWrapper::processCue(void* context)
1805 {
1806     ASSERT(isMainThread());
1807     ASSERT(context);
1808
1809     if (!context)
1810         return;
1811
1812     std::unique_ptr<LegibleOutputData> legibleOutputData(reinterpret_cast<LegibleOutputData*>(context));
1813
1814     MutexLocker locker(mapLock());
1815     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1816     if (!self) {
1817         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1818         return;
1819     }
1820
1821     if (!self->m_currentTextTrack)
1822         return;
1823
1824     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1825 }
1826
1827 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1828 {
1829     ASSERT(!isMainThread());
1830     MutexLocker locker(mapLock());
1831     AVFWrapper* self = avfWrapperForCallbackContext(context);
1832     if (!self) {
1833         LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1834         return;
1835     }
1836
1837     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1838
1839     ASSERT(legibleOutput == self->m_legibleOutput);
1840
1841     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, toMediaTime(itemTime), context);
1842
1843     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1844 }
1845 #endif
1846
1847 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
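// Resource loading requests are also delivered off the main thread. LoadRequestData carries the
// AVCFAssetResourceLoadingRequest and callback context to the main thread for
// processShouldWaitForLoadingOfResource().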
1848 struct LoadRequestData {
1849     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1850     void* m_context;
1851
1852     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1853         : m_request(request), m_context(context)
1854     {
1855     }
1856 };
1857
1858 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1859 {
1860     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1861     ASSERT(context);
1862
1863     if (!context)
1864         return;
1865
1866     std::unique_ptr<LoadRequestData> loadRequestData(reinterpret_cast<LoadRequestData*>(context));
1867
1868     MutexLocker locker(mapLock());
1869     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1870     if (!self) {
1871         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1872         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1873         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1874         return;
1875     }
1876
1877     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get())) {
1878         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1879         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1880     }
1881 }
1882
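// Decides whether WebCore will satisfy the loading request. "skd" URLs are key requests: the key URI is
// packaged into initData (a 4-byte length followed by the UTF-16 key URI) and passed to keyNeeded(); all
// other requests are handed to a WebCoreAVCFResourceLoader.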
1883 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1884 {
1885 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
1886     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1887     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1888     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1889     String scheme = schemeRef.get();
1890
1891     if (scheme == "skd") {
1892         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1893         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1894         String keyURI = keyURIRef.get();
1895
1896         // Create an initData with the following layout:
1897         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1898         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1899         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1900         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
1901         initDataView->set<uint32_t>(0, keyURISize, true);
1902
1903         RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length(), 0);
1905
1906         RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
1907         if (!m_owner->player()->keyNeeded(initData.get()))
1908             return false;
1909
1910         setRequestForKey(keyURI, avRequest);
1911         return true;
1912     }
1913 #endif
1914
1915     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1916     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1917     resourceLoader->startLoading();
1918     return true;
1919 }
1920
1921 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1922 {
1923     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1924     MutexLocker locker(mapLock());
1925     AVFWrapper* self = avfWrapperForCallbackContext(context);
1926     if (!self) {
1927         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1928         return false;
1929     }
1930
1931     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1932
1933     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1934
1935     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1936
1937     return true;
1938 }
1939 #endif
1940
1941 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1942 {
1943     if (asset == avAsset())
1944         return;
1945
1946     AVCFAssetCancelLoading(avAsset());
1947     m_avAsset = asset;
1948 }
1949
1950 PlatformLayer* AVFWrapper::platformLayer()
1951 {
1952     ASSERT(isMainThread());
1953     if (m_videoLayerWrapper)
1954         return m_videoLayerWrapper->platformLayer();
1955
1956     if (!videoLayer())
1957         return 0;
1958
1959     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1960     m_layerClient = std::make_unique<LayerClient>(this);
1961     if (!m_layerClient)
1962         return 0;
1963
1964     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1965     if (!m_videoLayerWrapper)
1966         return 0;
1967
1968     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1969
1970     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1971     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1972     m_videoLayerWrapper->setNeedsLayout();
1973     updateVideoLayerGravity();
1974
1975     return m_videoLayerWrapper->platformLayer();
1976 }
1977
1978 void AVFWrapper::createAVCFVideoLayer()
1979 {
1980     ASSERT(isMainThread());
1981     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1982     if (!avPlayer() || m_avCFVideoLayer)
1983         return;
1984
1985     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1986     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1987     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1988 }
1989
1990 void AVFWrapper::destroyVideoLayer()
1991 {
1992     ASSERT(isMainThread());
1993     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1994     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1995     m_layerClient = nullptr;
1996     m_caVideoLayer = nullptr;
1997     m_videoLayerWrapper = nullptr;
1998     if (!m_avCFVideoLayer.get())
1999         return;
2000
2001     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), nullptr);
2002     m_avCFVideoLayer = nullptr;
2003 }
2004
2005 void AVFWrapper::setVideoLayerNeedsCommit()
2006 {
2007     if (m_videoLayerWrapper)
2008         m_videoLayerWrapper->setNeedsCommit();
2009 }
2010
2011 void AVFWrapper::setVideoLayerHidden(bool value)
2012 {
2013     if (m_videoLayerWrapper)
2014         m_videoLayerWrapper->setHidden(value);
2015 }
2016
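// Creates an AVCFAssetImageGenerator configured for exact-time snapshots (zero tolerance before and after),
// clean-aperture cropping, and the track's preferred transform; createImageForTimeInRect() uses it to
// produce still frames.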
2017 void AVFWrapper::createImageGenerator()
2018 {
2019     ASSERT(isMainThread());
2020     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
2021     if (!avAsset() || m_imageGenerator)
2022         return;
2023
2024     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
2025
2026     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
2027     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
2028     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
2029     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
2030
2031     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
2032 }
2033
2034 void AVFWrapper::destroyImageGenerator()
2035 {
2036     ASSERT(isMainThread());
2037     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
2038     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
2039     m_imageGenerator = 0;
2040 }
2041
2042 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const FloatRect& rect)
2043 {
2044     if (!m_imageGenerator)
2045         return 0;
2046
2047 #if !LOG_DISABLED
2048     double start = monotonicallyIncreasingTime();
2049 #endif
2050
2051     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
2052     RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), toCMTime(time), 0, 0));
2053     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawimage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
2054
2055 #if !LOG_DISABLED
2056     double duration = monotonicallyIncreasingTime() - start;
2057     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
2058 #endif
2059
2060     return image;
2061 }
2062
2063 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
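// Returns the legible media selection group, but only once the asset's available-media-characteristics
// property has finished loading; before that it returns 0.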
2064 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
2065 {
2066     if (!avAsset())
2067         return 0;
2068
2069     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
2070         return 0;
2071
2072     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
2073 }
2074 #endif
2075
2076 void AVFWrapper::updateVideoLayerGravity()
2077 {
2078     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
2079     // FIXME: <rdar://problem/14884340>
2080 }
2081
2082 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(ENCRYPTED_MEDIA_V2)
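// Loading requests are queued per key URI; takeRequestForKeyURI() hands them back most-recently-added first.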
2083 void AVFWrapper::setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest)
2084 {
2085     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2086     if (requestsIterator != m_keyURIToRequestMap.end()) {
2087         requestsIterator->value.append(avRequest);
2088         return;
2089     }
2090
2091     Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>> requests;
2092     requests.append(avRequest);
2093     m_keyURIToRequestMap.set(keyURI, requests);
2094 }
2095
2096 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
2097 {
2098     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2099     if (requestsIterator == m_keyURIToRequestMap.end())
2100         return RetainPtr<AVCFAssetResourceLoadingRequestRef>();
2101
2102     auto request = requestsIterator->value.takeLast();
2103     if (requestsIterator->value.isEmpty())
2104         m_keyURIToRequestMap.take(keyURI);
2105
2106     return request;
2107 }
2108 #endif
2109
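// Keeps the hosted CACFLayer and the AVCFPlayerLayer sized to match the wrapper layer whenever Core
// Animation lays out its sublayers.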
2110 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
2111 {
2112     ASSERT(isMainThread());
2113     ASSERT(m_parent);
2114     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
2115
2116     CGRect bounds = wrapperLayer->bounds();
2117     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
2118     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 
2119
2120     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
2121     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
2122
2123     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2124 }
2125
2126 } // namespace WebCore
2127
2128 #else
2129 // AVFoundation should always be enabled for Apple production builds.
2130 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2131 #error AVFoundation is not enabled!
2132 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2133 #endif // USE(AVFOUNDATION)
2134 #endif // PLATFORM(WIN) && ENABLE(VIDEO)