Move URL from WebCore to WTF
WebKit-https.git: Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if PLATFORM(WIN) && ENABLE(VIDEO) 
29
30 #if USE(AVFOUNDATION)
31
32 #include "MediaPlayerPrivateAVFoundationCF.h"
33
34 #include "ApplicationCacheResource.h"
35 #include "CDMSessionAVFoundationCF.h"
36 #include "COMPtr.h"
37 #include "FloatConversion.h"
38 #include "GraphicsContext.h"
39 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
40 #include "InbandTextTrackPrivateAVCF.h"
41 #else
42 #include "InbandTextTrackPrivateLegacyAVCF.h"
43 #endif
44 #include "Logging.h"
45 #include "PlatformCALayerClient.h"
46 #include "PlatformCALayerWin.h"
47 #include "TimeRanges.h"
48 #include "WebCoreAVCFResourceLoader.h"
49 #include <pal/avfoundation/MediaTimeAVFoundation.h>
50 #include <wtf/URL.h>
51
52 #include <AVFoundationCF/AVCFPlayerItem.h>
53 #if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
54 #include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
55 #endif
56 #include <AVFoundationCF/AVCFPlayerLayer.h>
57 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
58 #include <AVFoundationCF/AVCFAssetResourceLoader.h>
59 #endif
60 #include <AVFoundationCF/AVFoundationCF.h>
61 #include <d3d9.h>
62 #include <delayimp.h>
63 #include <dispatch/dispatch.h>
64 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
65 #include <JavaScriptCore/DataView.h>
66 #include <JavaScriptCore/JSCInlines.h>
67 #include <JavaScriptCore/TypedArrayInlines.h>
68 #include <JavaScriptCore/Uint16Array.h>
69 #endif
70 #include <wtf/HashMap.h>
71 #include <wtf/NeverDestroyed.h>
72 #include <wtf/Threading.h>
73 #include <wtf/text/CString.h>
74 #include <wtf/text/StringView.h>
75 #include <wtf/StringPrintStream.h>
76
77 // Soft-linking headers must be included last since they #define functions, constants, etc.
78 #include "AVFoundationCFSoftLinking.h"
79 #include <pal/cf/CoreMediaSoftLink.h>
80
81 // We don't bother softlinking against libdispatch since it's already been loaded by AAS.
82 #ifdef DEBUG_ALL
83 #pragma comment(lib, "libdispatch_debug.lib")
84 #else
85 #pragma comment(lib, "libdispatch.lib")
86 #endif
87
88 enum {
89     AVAssetReferenceRestrictionForbidRemoteReferenceToLocal = (1UL << 0),
90     AVAssetReferenceRestrictionForbidLocalReferenceToRemote = (1UL << 1)
91 };
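// These values appear to mirror AVFoundation's AVAssetReferenceRestrictions option flags and are
// presumably redeclared here because the AVCF headers do not export them; they are most likely
// applied when the AVCFURLAsset is configured in createAssetForURL().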
92
93
94 namespace WebCore {
95 using namespace std;
96 using namespace PAL;
97
98 class LayerClient;
99
100 class AVFWrapper {
101 public:
102     AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
103     ~AVFWrapper();
104
105     void scheduleDisconnectAndDelete();
106
107     void createAVCFVideoLayer();
108     void destroyVideoLayer();
109     PlatformLayer* platformLayer();
110
111     CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
112     PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
113     void setVideoLayerNeedsCommit();
114     void setVideoLayerHidden(bool);
115
116     void createImageGenerator();
117     void destroyImageGenerator();
118     RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const FloatRect&);
119
120     void createAssetForURL(const URL&, bool inheritURI);
121     void setAsset(AVCFURLAssetRef);
122     
123     void createPlayer(IDirect3DDevice9*);
124     void createPlayerItem();
125     
126     void checkPlayability();
127     void beginLoadingMetadata();
128     
129     void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
130     void updateVideoLayerGravity();
131
132     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
133     InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }
134
135 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
136     static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
137     static void processCue(void* context);
138 #endif
139 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
140     static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
141 #endif
142     static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
143     static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
144     static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
145     static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
146     static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
147     static void processNotification(void* context);
148
149     inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
150     inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
151     inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
152     inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
153     inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
154     inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
155 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
156     inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
157     AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
158 #endif
159     inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }
160
161 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
162     RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
163     void setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest);
164 #endif
165
166 private:
167     inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }
168
169     static Lock& mapLock();
170     static HashMap<uintptr_t, AVFWrapper*>& map();
171     static AVFWrapper* avfWrapperForCallbackContext(void*);
172     void addToMap();
173     void removeFromMap() const;
174 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
175     bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
176     static void processShouldWaitForLoadingOfResource(void* context);
177 #endif
178
179     static void disconnectAndDeleteAVFWrapper(void*);
180
181     static uintptr_t s_nextAVFWrapperObjectID;
182     uintptr_t m_objectID;
183
184     MediaPlayerPrivateAVFoundationCF* m_owner;
185
186     RetainPtr<AVCFPlayerRef> m_avPlayer;
187     RetainPtr<AVCFURLAssetRef> m_avAsset;
188     RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
189     RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
190     RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
191     RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
192 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
193     RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
194     RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
195 #endif
196
197     dispatch_queue_t m_notificationQueue;
198
199     mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
200     RefPtr<PlatformCALayer> m_videoLayerWrapper;
201
202     std::unique_ptr<LayerClient> m_layerClient;
203     COMPtr<IDirect3DDevice9Ex> m_d3dDevice;
204
205     InbandTextTrackPrivateAVF* m_currentTextTrack;
206
207 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
208     HashMap<String, Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>>> m_keyURIToRequestMap;
209     AVCFAssetResourceLoaderCallbacks m_resourceLoaderCallbacks;
210 #endif
211 };
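// A rough summary, inferred from the declarations above and the callback plumbing below: AVFWrapper
// owns all of the AVCF objects (asset, player, player item, layer, observers) on behalf of the
// media player. It is created on the main thread but torn down on the notification dispatch queue
// (see scheduleDisconnectAndDelete), and every static C callback receives m_objectID -- not a raw
// pointer -- as its context, so a callback that races with destruction simply fails to find the
// wrapper in the global map instead of touching freed memory.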
212
213 uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;
214
215 class LayerClient : public PlatformCALayerClient {
216 public:
217     LayerClient(AVFWrapper* parent) : m_parent(parent) { }
218     virtual ~LayerClient() { m_parent = 0; }
219
220 private:
221     virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
222     virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }
223
224     virtual void platformCALayerAnimationStarted(MonotonicTime beginTime) { }
225     virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesOrientation::TopDown; }
226     virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&, GraphicsLayerPaintBehavior) { }
227     virtual bool platformCALayerShowDebugBorders() const { return false; }
228     virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
229     virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }
230
231     virtual bool platformCALayerContentsOpaque() const { return false; }
232     virtual bool platformCALayerDrawsContent() const { return false; }
233     virtual float platformCALayerDeviceScaleFactor() const { return 1; }
234
235     AVFWrapper* m_parent;
236 };
237
238 #if !LOG_DISABLED
239 static const char* boolString(bool val)
240 {
241     return val ? "true" : "false";
242 }
243 #endif
244
245 static CFArrayRef createMetadataKeyNames()
246 {
247     static const CFStringRef keyNames[] = {
248         AVCFAssetPropertyDuration,
249         AVCFAssetPropertyNaturalSize,
250         AVCFAssetPropertyPreferredTransform,
251         AVCFAssetPropertyPreferredRate,
252         AVCFAssetPropertyPlayable,
253         AVCFAssetPropertyTracks,
254 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
255         AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
256 #endif
257     };
258     
259     return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
260 }
261
262 static CFArrayRef metadataKeyNames()
263 {
264     static CFArrayRef keys = createMetadataKeyNames();
265     return keys;
266 }
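// The key array is created once and intentionally never released; metadataKeyNames() hands out the
// same CFArrayRef for the lifetime of the process.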
267
268 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
269 static CFStringRef CMTimeRangeStartKey()
270 {
271     return CFSTR("start");
272 }
273
274 // FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
275 static CFStringRef CMTimeRangeDurationKey()
276 {
277     return CFSTR("duration");
278 }
279
280 // FIXME: It would be better if AVCF exported this notification name.
281 static CFStringRef CACFContextNeedsFlushNotification()
282 {
283     return CFSTR("kCACFContextNeedsFlushNotification");
284 }
285
286 // Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
287 // to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
288 inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
289 {
290     return wrapper ? wrapper->videoLayer() : 0; 
291 }
292
293 inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
294 {
295     return wrapper ? wrapper->avPlayer() : 0; 
296 }
297
298 inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
299 {
300     return wrapper ? wrapper->avAsset() : 0; 
301 }
302
303 inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
304 {
305     return wrapper ? wrapper->avPlayerItem() : 0; 
306 }
307
308 inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
309 {
310     return wrapper ? wrapper->imageGenerator() : 0; 
311 }
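// These free functions are deliberately null-tolerant so call sites can pass m_avfWrapper without
// checking it first, e.g. (a typical pattern used throughout this file):
//
//     if (!avPlayerItem(m_avfWrapper))
//         return MediaTime::zeroTime();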
312
313 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
314 inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
315 {
316     return wrapper ? wrapper->legibleOutput() : 0;
317 }
318
319 inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
320 {
321     return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
322 }
323 #endif
324
325 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
326 static dispatch_queue_t globalQueue = nullptr;
327
328 static void initGlobalLoaderDelegateQueue(void* ctx)
329 {
330     globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
331 }
332
333 static dispatch_queue_t globalLoaderDelegateQueue()
334 {
335     static dispatch_once_t onceToken;
336
337     dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);
338
339     return globalQueue;
340 }
341 #endif
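// dispatch_once_f() guarantees the loader-delegate queue is created exactly once even if multiple
// players request it concurrently; since the queue is serial, resource-loader delegate callbacks
// are presumably serialized on it.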
342
343 void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
344 {
345     if (isAvailable())
346         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationCF>(player); },
347             getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
348 }
349
350 MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
351     : MediaPlayerPrivateAVFoundation(player)
352     , m_avfWrapper(0)
353     , m_videoFrameHasDrawn(false)
354 {
355     LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
356 }
357
358 MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
359 {
360     LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
361 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
362     for (auto& pair : m_resourceLoaderMap)
363         pair.value->invalidate();
364 #endif
365     cancelLoad();
366 }
367
368 void MediaPlayerPrivateAVFoundationCF::cancelLoad()
369 {
370     LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);
371
372     // Do nothing when our cancellation of pending loading calls its completion handler
373     setDelayCallbacks(true);
374     setIgnoreLoadStateChanges(true);
375
376     tearDownVideoRendering();
377
378     clearTextTracks();
379
380     if (m_avfWrapper) {
381         // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to 
382         // disconnectAndDeleteAVFWrapper on that queue. 
383         m_avfWrapper->scheduleDisconnectAndDelete();
384         m_avfWrapper = 0;
385     }
386
387     setIgnoreLoadStateChanges(false);
388     setDelayCallbacks(false);
389 }
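// The setDelayCallbacks(true)/setDelayCallbacks(false) pair brackets operations that can trigger a
// burst of AVCF notifications (teardown, seeking, rate changes); the same pattern recurs throughout
// this file so state updates are processed after the operation completes rather than midway through.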
390
391 void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
392 {
393     ASSERT(supportsAcceleratedRendering());
394
395     if (m_avfWrapper)
396         m_avfWrapper->updateVideoLayerGravity();
397 }
398
399 bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
400 {
401     return videoLayer(m_avfWrapper);
402 }
403
404 bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
405 {
406     return imageGenerator(m_avfWrapper);
407 }
408
409 void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
410 {
411     LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);
412     ASSERT(isMainThread());
413
414     if (imageGenerator(m_avfWrapper))
415         return;
416
417     if (m_avfWrapper)
418         m_avfWrapper->createImageGenerator();
419 }
420
421 void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
422 {
423     ASSERT(isMainThread());
424     if (m_avfWrapper)
425         m_avfWrapper->destroyImageGenerator();
426 }
427
428 void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
429 {
430     ASSERT(isMainThread());
431     ASSERT(supportsAcceleratedRendering());
432
433     if (m_avfWrapper)
434         m_avfWrapper->createAVCFVideoLayer();
435 }
436
437 void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
438 {
439     ASSERT(isMainThread());
440     LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
441     if (m_avfWrapper)
442         m_avfWrapper->destroyVideoLayer();
443 }
444
445 bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
446 {
447     return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
448 }
449
450 void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
451 {
452     if (m_avfWrapper)
453         m_avfWrapper->setCurrentTextTrack(track);
454 }
455
456 InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
457 {
458     if (m_avfWrapper)
459         return m_avfWrapper->currentTextTrack();
460
461     return 0;
462 }
463
464 void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const URL& url)
465 {
466     ASSERT(!m_avfWrapper);
467
468     setDelayCallbacks(true);
469
470     bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");
471
472     m_avfWrapper = new AVFWrapper(this);
473     m_avfWrapper->createAssetForURL(url, inheritURI);
474     setDelayCallbacks(false);
475 }
476
477 void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
478 {
479     ASSERT(isMainThread());
480     ASSERT(m_avfWrapper);
481     
482     setDelayCallbacks(true);
483     m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
484     setDelayCallbacks(false);
485 }
486
487 void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
488 {
489     ASSERT(isMainThread());
490     ASSERT(m_avfWrapper);
491     
492     setDelayCallbacks(true);
493     m_avfWrapper->createPlayerItem();
494
495     setDelayCallbacks(false);
496 }
497
498 void MediaPlayerPrivateAVFoundationCF::checkPlayability()
499 {
500     ASSERT(m_avfWrapper);
501     m_avfWrapper->checkPlayability();
502 }
503
504 void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
505 {
506     ASSERT(m_avfWrapper);
507     m_avfWrapper->beginLoadingMetadata();
508 }
509
510 MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
511 {
512     if (!avPlayerItem(m_avfWrapper))
513         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;
514
515     AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
516     if (status == AVCFPlayerItemStatusUnknown)
517         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
518     if (status == AVCFPlayerItemStatusFailed)
519         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
520     if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
521         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
522     if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
523         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
524     if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
525         return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
526     return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
527 }
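// Note the ordering above: hard status values (unknown, failed) are reported first, then the
// buffering-related states, and only if none of those apply does the item count as ready to play.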
528
529 PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
530 {
531     ASSERT(isMainThread());
532     if (!m_avfWrapper)
533         return 0;
534
535     return m_avfWrapper->platformLayer();
536 }
537
538 void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
539 {
540     ASSERT(isMainThread());
541     if (!m_avfWrapper)
542         return;
543     
544     // FIXME: We use a CATransaction here on the Mac, we need to figure out why this was done there and
545     // whether we're affected by the same issue.
546     setDelayCallbacks(true);
547     m_avfWrapper->setVideoLayerHidden(!isVisible);    
548     if (!isVisible)
549         tearDownVideoRendering();
550     setDelayCallbacks(false);
551 }
552
553 void MediaPlayerPrivateAVFoundationCF::platformPlay()
554 {
555     LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
556     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
557         return;
558
559     setDelayCallbacks(true);
560     AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
561     setDelayCallbacks(false);
562 }
563
564 void MediaPlayerPrivateAVFoundationCF::platformPause()
565 {
566     LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
567     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
568         return;
569
570     setDelayCallbacks(true);
571     AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
572     setDelayCallbacks(false);
573 }
574
575 MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
576 {
577     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
578         return MediaTime::zeroTime();
579
580     CMTime cmDuration;
581
582     // Check the AVPlayerItem if we have one and it has loaded its duration; some assets never report a duration.
583     if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
584         cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
585     else
586         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
587
588     if (CMTIME_IS_NUMERIC(cmDuration))
589         return PAL::toMediaTime(cmDuration);
590
591     if (CMTIME_IS_INDEFINITE(cmDuration))
592         return MediaTime::positiveInfiniteTime();
593
594     LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
595     return MediaTime::invalidTime();
596 }
597
598 MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
599 {
600     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
601         return MediaTime::zeroTime();
602
603     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
604     if (CMTIME_IS_NUMERIC(itemTime))
605         return max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
606
607     return MediaTime::zeroTime();
608 }
609
610 void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
611 {
612     if (!m_avfWrapper)
613         return;
614     
615     // seekToTime generates several event callbacks, update afterwards.
616     setDelayCallbacks(true);
617     m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
618     setDelayCallbacks(false);
619 }
620
621 void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
622 {
623     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
624         return;
625
626     AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
627 }
628
629 void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
630 {
631     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
632         return;
633
634     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
635     AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
636 }
637
638 void MediaPlayerPrivateAVFoundationCF::setRate(float rate)
639 {
640     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setRate(%p) - rate: %f", this, rate);
641     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
642         return;
643
644     setDelayCallbacks(true);
645     AVCFPlayerSetRate(avPlayer(m_avfWrapper), rate);
646     setDelayCallbacks(false);
647 }
648
649 double MediaPlayerPrivateAVFoundationCF::rate() const
650 {
651     if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
652         return 0;
653
654     setDelayCallbacks(true);
655     double currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
656     setDelayCallbacks(false);
657
658     return currentRate;
659 }
660
661 static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
662 {
663     // Is the range valid?
664     if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
665         return false;
666
667     if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
668         return false;
669
670     return true;
671 }
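// The loaded/seekable range loops below all follow the same pattern: each CFDictionary in the
// copied array encodes a CMTimeRange, which is unpacked with CMTimeMakeFromDictionary() using the
// "start" and "duration" keys defined near the top of this file, validated with the helper above,
// and converted to MediaTime via PAL::toMediaTime(). A sketch of one iteration:
//
//     CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
//     CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
//     if (timeRangeIsValidAndNotEmpty(start, duration))
//         timeRanges->add(PAL::toMediaTime(start), PAL::toMediaTime(start) + PAL::toMediaTime(duration));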
672
673 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
674 {
675     auto timeRanges = std::make_unique<PlatformTimeRanges>();
676
677     if (!avPlayerItem(m_avfWrapper))
678         return timeRanges;
679
680     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
681     if (!loadedRanges)
682         return timeRanges;
683
684     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
685     for (CFIndex i = 0; i < rangeCount; i++) {
686         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
687         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
688         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
689         
690         if (timeRangeIsValidAndNotEmpty(start, duration)) {
691             MediaTime rangeStart = PAL::toMediaTime(start);
692             MediaTime rangeEnd = rangeStart + PAL::toMediaTime(duration);
693             timeRanges->add(rangeStart, rangeEnd);
694         }
695     }
696
697     return timeRanges;
698 }
699
700 MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
701 {
702     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
703     if (!seekableRanges) 
704         return MediaTime::zeroTime(); 
705
706     MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
707     bool hasValidRange = false; 
708     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
709     for (CFIndex i = 0; i < rangeCount; i++) {
710         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
711         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
712         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
713         if (!timeRangeIsValidAndNotEmpty(start, duration))
714             continue;
715
716         hasValidRange = true; 
717         MediaTime startOfRange = PAL::toMediaTime(start);
718         if (minTimeSeekable > startOfRange) 
719             minTimeSeekable = startOfRange; 
720     } 
721     return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
722 }
723
724 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
725 {
726     if (!avPlayerItem(m_avfWrapper))
727         return MediaTime::zeroTime();
728
729     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
730     if (!seekableRanges)
731         return MediaTime::zeroTime();
732
733     MediaTime maxTimeSeekable;
734     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
735     for (CFIndex i = 0; i < rangeCount; i++) {
736         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
737         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
738         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
739         if (!timeRangeIsValidAndNotEmpty(start, duration))
740             continue;
741         
742         MediaTime endOfRange = PAL::toMediaTime(CMTimeAdd(start, duration));
743         if (maxTimeSeekable < endOfRange)
744             maxTimeSeekable = endOfRange;
745     }
746
747     return maxTimeSeekable;   
748 }
749
750 MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
751 {
752     if (!avPlayerItem(m_avfWrapper))
753         return MediaTime::zeroTime();
754
755     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
756     if (!loadedRanges)
757         return MediaTime::zeroTime();
758
759     MediaTime maxTimeLoaded;
760     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
761     for (CFIndex i = 0; i < rangeCount; i++) {
762         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
763         CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
764         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
765         if (!timeRangeIsValidAndNotEmpty(start, duration))
766             continue;
767         
768         MediaTime endOfRange = PAL::toMediaTime(CMTimeAdd(start, duration));
769         if (maxTimeLoaded < endOfRange)
770             maxTimeLoaded = endOfRange;
771     }
772
773     return maxTimeLoaded;   
774 }
775
776 unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
777 {
778     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
779         return 0;
780
781     int64_t totalMediaSize = 0;
782     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
783     CFIndex trackCount = CFArrayGetCount(tracks.get());
784     for (CFIndex i = 0; i < trackCount; i++) {
785         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
786         totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
787     }
788
789     return static_cast<unsigned long long>(totalMediaSize);
790 }
791
792 MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
793 {
794     if (!avAsset(m_avfWrapper))
795         return MediaPlayerAVAssetStatusDoesNotExist;
796
797     // First, make sure all metadata properties we rely on are loaded.
798     CFArrayRef keys = metadataKeyNames();
799     CFIndex keyCount = CFArrayGetCount(keys);
800     for (CFIndex i = 0; i < keyCount; i++) {
801         CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
802         AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);
803
804         if (keyStatus < AVCFPropertyValueStatusLoaded)
805             return MediaPlayerAVAssetStatusLoading;
806         if (keyStatus == AVCFPropertyValueStatusFailed) {
807             if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
808                 // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
809                 // <rdar://problem/15966685>
810                 continue;
811             }
812 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
813             if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
814                 // On Windows, the media selection options are not available when initially interacting with a streaming source.
815                 // <rdar://problem/16160699>
816                 continue;
817             }
818 #endif
819             return MediaPlayerAVAssetStatusFailed;
820         }
821         if (keyStatus == AVCFPropertyValueStatusCancelled)
822             return MediaPlayerAVAssetStatusCancelled;
823     }
824
825     if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
826         return MediaPlayerAVAssetStatusPlayable;
827
828     return MediaPlayerAVAssetStatusLoaded;
829 }
830
831 void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
832 {
833     ASSERT(isMainThread());
834     if (!metaDataAvailable() || context.paintingDisabled())
835         return;
836
837     if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
838         // We're being told to render into a context, but we already have the
839         // video layer, which probably means we've been called from <canvas>.
840         createContextVideoRenderer();
841     }
842
843     paint(context, rect);
844 }
845
846 void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext& context, const FloatRect& rect)
847 {
848     ASSERT(isMainThread());
849     if (!metaDataAvailable() || context.paintingDisabled() || !imageGenerator(m_avfWrapper))
850         return;
851
852     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
853
854     setDelayCallbacks(true);
855     RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
856     if (image) {
857         context.save();
858         context.translate(rect.x(), rect.y() + rect.height());
859         context.scale(FloatSize(1.0f, -1.0f));
860         context.setImageInterpolationQuality(InterpolationLow);
861         FloatRect paintRect(FloatPoint(), rect.size());
862 #if USE(DIRECT2D)
863         notImplemented();
864 #else
865         CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
866 #endif
867         context.restore();
868         image = 0;
869     }
870     setDelayCallbacks(false);
871     
872     m_videoFrameHasDrawn = true;
873 }
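// The translate/scale(1, -1) pair above flips the CG coordinate system so the CGImage, whose
// origin is at the bottom-left, is drawn upright into WebCore's top-down GraphicsContext.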
874
875 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
876
877 static bool keySystemIsSupported(const String& keySystem)
878 {
879     return equalLettersIgnoringASCIICase(keySystem, "com.apple.fps")
880         || equalLettersIgnoringASCIICase(keySystem, "com.apple.fps.1_0");
881 }
882
883 #endif
884
885 static const HashSet<String, ASCIICaseInsensitiveHash>& avfMIMETypes()
886 {
887     static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []() {
888         HashSet<String, ASCIICaseInsensitiveHash> types;
889         RetainPtr<CFArrayRef> avTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());
890         CFIndex typeCount = CFArrayGetCount(avTypes.get());
891         for (CFIndex i = 0; i < typeCount; ++i)
892             types.add((CFStringRef)CFArrayGetValueAtIndex(avTypes.get(), i));
893         return types;
894     }();
895
896     return cache;
897 }
898
899 void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
900 {
901     supportedTypes = avfMIMETypes();
902 }
903
904 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
905 {
906     auto containerType = parameters.type.containerType();
907     if (isUnsupportedMIMEType(containerType))
908         return MediaPlayer::IsNotSupported;
909
910     if (!staticMIMETypeList().contains(containerType) && !avfMIMETypes().contains(containerType))
911         return MediaPlayer::IsNotSupported;
912
913     auto codecs = parameters.type.parameter(ContentType::codecsParameter());
914 #if HAVE(AVCFURL_PLAYABLE_MIMETYPE)
915     // The spec says:
916     // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
917     if (codecs.isEmpty())
918         return MediaPlayer::MayBeSupported;
919
920     String typeString = containerType + "; codecs=\"" + codecs + "\"";
921     return AVCFURLAssetIsPlayableExtendedMIMEType(typeString.createCFString().get()) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
922 #else
923     if (avfMIMETypes().contains(containerType))
924         return codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
925     return MediaPlayer::IsNotSupported;
926 #endif
927 }
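// For illustration, a query for 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"' would reach
// AVCFURLAssetIsPlayableExtendedMIMEType() with that extended MIME string (container type plus the
// re-attached codecs parameter), while a bare 'video/mp4' short-circuits to MayBeSupported.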
928
929 bool MediaPlayerPrivateAVFoundationCF::supportsKeySystem(const String& keySystem, const String& mimeType)
930 {
931 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
932     if (keySystem.isEmpty())
933         return false;
934
935     if (!keySystemIsSupported(keySystem))
936         return false;
937
938     if (!mimeType.isEmpty() && !avfMIMETypes().contains(mimeType))
939         return false;
940
941     return true;
942 #else
943     UNUSED_PARAM(keySystem);
944     UNUSED_PARAM(mimeType);
945     return false;
946 #endif
947 }
948
949 bool MediaPlayerPrivateAVFoundationCF::isAvailable()
950 {
951     return AVFoundationCFLibrary() && isCoreMediaFrameworkAvailable();
952 }
953
954 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
955 void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
956 {
957     WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);
958
959     if (resourceLoader)
960         resourceLoader->stopLoading();
961 }
962
963 void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
964 {
965     m_resourceLoaderMap.remove(avRequest);
966 }
967 #endif
968
969 MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
970 {
971     if (!metaDataAvailable())
972         return timeValue;
973
974     // FIXME - can not implement until rdar://8721669 is fixed.
975     return timeValue;
976 }
977
978 void MediaPlayerPrivateAVFoundationCF::tracksChanged()
979 {
980     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
981     m_languageOfPrimaryAudioTrack = String();
982
983     if (!avAsset(m_avfWrapper))
984         return;
985
986     setDelayCharacteristicsChangedNotification(true);
987
988     bool haveCCTrack = false;
989     bool hasCaptions = false;
990
991     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
992     // asked about those fairly frequently.
993     if (!avPlayerItem(m_avfWrapper)) {
994         // We don't have a player item yet, so check with the asset because some assets support inspection
995         // prior to becoming ready to play.
996         RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
997         setHasVideo(CFArrayGetCount(visualTracks.get()));
998
999         RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
1000         setHasAudio(CFArrayGetCount(audioTracks.get()));
1001
1002 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1003         RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
1004         hasCaptions = CFArrayGetCount(captionTracks.get());
1005 #endif
1006     } else {
1007         bool hasVideo = false;
1008         bool hasAudio = false;
1009
1010         RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1011
1012         CFIndex trackCount = CFArrayGetCount(tracks.get());
1013         for (CFIndex i = 0; i < trackCount; i++) {
1014             AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1015             
1016             if (AVCFPlayerItemTrackIsEnabled(track)) {
1017                 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
1018                 if (!assetTrack) {
1019                     // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1020                     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %p is enabled, but has no asset track.", this, track);
1021                     continue;
1022                 }
1023                 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1024                 if (!mediaType)
1025                     continue;
1026                 
1027                 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1028                     hasVideo = true;
1029                 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
1030                     hasAudio = true;
1031                 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
1032 #if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1033                     hasCaptions = true;
1034 #endif
1035                     haveCCTrack = true;
1036                 }
1037             }
1038         }
1039
1040         setHasVideo(hasVideo);
1041         setHasAudio(hasAudio);
1042     }
1043
1044 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1045     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1046     if (legibleGroup) {
1047         RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1048         hasCaptions = CFArrayGetCount(playableOptions.get());
1049         if (hasCaptions)
1050             processMediaSelectionOptions();
1051     }
1052 #endif
1053
1054 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1055     if (haveCCTrack)
1056         processLegacyClosedCaptionsTracks();
1057 #endif
1058
1059     setHasClosedCaptions(hasCaptions);
1060
1061     LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 
1062         this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));
1063
1064     sizeChanged();
1065
1066     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1067         characteristicsChanged();
1068
1069     setDelayCharacteristicsChangedNotification(false);
1070 }
1071
1072 void MediaPlayerPrivateAVFoundationCF::sizeChanged()
1073 {
1074     ASSERT(isMainThread());
1075     if (!avAsset(m_avfWrapper))
1076         return;
1077     
1078     // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
1079     // the union of all visual track rects.
1080     CGRect trackRectUnion = CGRectZero;
1081     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
1082     CFIndex trackCount = CFArrayGetCount(tracks.get());
1083     for (CFIndex i = 0; i < trackCount; i++) {
1084         AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1085         
1086         CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack);
1087         CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
1088         trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack)));
1089     }
1090     // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
1091     trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y);
1092     CGSize naturalSize = trackRectUnion.size;
1093
1094     if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper))
1095         naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper));
1096
1097     // Also look at the asset's preferred transform so we account for a movie matrix.
1098     CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper)));
1099     if (movieSize.width > naturalSize.width)
1100         naturalSize.width = movieSize.width;
1101     if (movieSize.height > naturalSize.height)
1102         naturalSize.height = movieSize.height;
1103     setNaturalSize(IntSize(naturalSize));
1104 }
1105
1106 void MediaPlayerPrivateAVFoundationCF::resolvedURLChanged()
1107 {
1108     if (m_avfWrapper && m_avfWrapper->avAsset())
1109         setResolvedURL(URL(adoptCF(AVCFAssetCopyResolvedURL(m_avfWrapper->avAsset())).get()));
1110     else
1111         setResolvedURL({ });
1112 }
1113
1114 bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const
1115 {
1116     // The AVFoundationCF player needs to have the root compositor available at construction time
1117     // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode.
1118     //
1119     // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode
1120     // when asked, then we could follow AVFoundation's model and switch to compositing
1121     // mode when beginning to play media.
1122     return true;
1123 }
1124
1125 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
1126
1127 RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI)
1128 {
1129     if (!m_avfWrapper)
1130         return nullptr;
1131
1132     return m_avfWrapper->takeRequestForKeyURI(keyURI);
1133 }
1134
1135 std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem, LegacyCDMSessionClient* client)
1136 {
1137     if (!keySystemIsSupported(keySystem))
1138         return nullptr;
1139
1140     return std::make_unique<CDMSessionAVFoundationCF>(*this, client);
1141 }
1142
1143 #elif ENABLE(LEGACY_ENCRYPTED_MEDIA)
1144
1145 std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem, LegacyCDMSessionClient*)
1146 {
1147     return nullptr;
1148 }
1149
1150 #endif
1151
1152 long MediaPlayerPrivateAVFoundationCF::assetErrorCode() const
1153 {
1154     if (!avAsset(m_avfWrapper))
1155         return 0;
1156
1157     CFErrorRef error = nullptr;
1158     AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), AVCFAssetPropertyPlayable, &error);
1159     if (!error)
1160         return 0;
1161
1162     long code = CFErrorGetCode(error);
1163     CFRelease(error);
1164     return code;
1165 }
1166
1167 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1168 void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks()
1169 {
1170 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1171     AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper));
1172 #endif
1173
1174     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1175     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));
1176     CFIndex trackCount = CFArrayGetCount(tracks.get());
1177     for (CFIndex i = 0; i < trackCount; ++i) {
1178         AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));
1179
1180         RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack));
1181         if (!assetTrack) {
1182             // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
1183             LOG(Media, "MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks(%p) - track %p has no asset track.", this, playerItemTrack);
1184             continue;
1185         }
1186         CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
1187         if (!mediaType)
1188             continue;
1189                 
1190         if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo)
1191             continue;
1192
1193         bool newCCTrack = true;
1194         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1195             if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
1196                 continue;
1197
1198             RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get());
1199             if (track->avPlayerItemTrack() == playerItemTrack) {
1200                 removedTextTracks.remove(i - 1);
1201                 newCCTrack = false;
1202                 break;
1203             }
1204         }
1205
1206         if (!newCCTrack)
1207             continue;
1208         
1209         m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack));
1210     }
1211
1212     processNewAndRemovedTextTracks(removedTextTracks);
1213 }
1214 #endif
1215
1216 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1217 void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions()
1218 {
1219     AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
1220     if (!legibleGroup) {
1221         LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
1222         return;
1223     }
1224
1225     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
1226     // but set the selected legible track to nil so text tracks will not be automatically configured.
1227     if (!m_textTracks.size() && AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)) {
1228         if (AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper))
1229             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup);
1230     }
1231
1232     Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
1233     RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
1234     CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get());
1235     for (CFIndex i = 0; i < legibleOptionsCount; ++i) {
1236         AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i));
1237         bool newTrack = true;
1238         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
1239             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1240                 continue;
1241
1242             RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get());
1243             if (CFEqual(track->mediaSelectionOption(), option)) {
1244                 removedTextTracks.remove(i - 1);
1245                 newTrack = false;
1246                 break;
1247             }
1248         }
1249         if (!newTrack)
1250             continue;
1251
1252         m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic));
1253     }
1254
1255     processNewAndRemovedTextTracks(removedTextTracks);
1256 }
1257
1258 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1259
1260 void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
1261 {
1262     if (m_currentTextTrack == track)
1263         return;
1264
1265     LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
1266         
1267     m_currentTextTrack = track;
1268
1269     if (track) {
1270         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
1271             AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE);
1272 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1273         else
1274             AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia());
1275 #endif
1276     } else {
1277 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1278         AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia());
1279 #endif
1280         AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE);
1281     }
1282 }
1283
1284 String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const
1285 {
1286     if (!m_languageOfPrimaryAudioTrack.isNull())
1287         return m_languageOfPrimaryAudioTrack;
1288
1289     if (!avPlayerItem(m_avfWrapper))
1290         return emptyString();
1291
1292 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1293     // If AVFoundation has an audible group, return the language of the currently selected audible option.
1294     AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible);
1295     AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup);
1296     if (currentlySelectedAudibleOption) {
1297         RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption));
1298         if (audibleOptionLocale)
1299             m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get());
1300         else
1301             m_languageOfPrimaryAudioTrack = emptyString();
1302
1303         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1304
1305         return m_languageOfPrimaryAudioTrack;
1306     }
1307 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1308
1309     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
1310     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
1311     RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio));
1312     CFIndex trackCount = CFArrayGetCount(tracks.get());
1313     if (!tracks || trackCount != 1) {
1314         m_languageOfPrimaryAudioTrack = emptyString();
1315         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? trackCount : 0));
1316         return m_languageOfPrimaryAudioTrack;
1317     }
1318
1319     AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0);
1320     RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track));
1321
1322     // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full
1323     // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the
1324     // ISO 639-2/T language code so check it.
1325     if (!language)
1326         language = adoptCF(AVCFAssetTrackCopyLanguageCode(track));
1327
1328     // Some legacy tracks have "und" as a language, treat that the same as no language at all.
1329     if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) {
1330         m_languageOfPrimaryAudioTrack = language.get();
1331         LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
1332         return m_languageOfPrimaryAudioTrack;
1333     }
1334
1335     LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
1336     m_languageOfPrimaryAudioTrack = emptyString();
1337     return m_languageOfPrimaryAudioTrack;
1338 }
1339
1340 void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay()
1341 {
1342     if (m_avfWrapper)
1343         m_avfWrapper->setVideoLayerNeedsCommit();
1344 }
1345
1346 AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner)
1347     : m_owner(owner)
1348     , m_objectID(s_nextAVFWrapperObjectID++)
1349     , m_currentTextTrack(0)
1350 {
1351     ASSERT(isMainThread());
1352     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1353     LOG(Media, "AVFWrapper::AVFWrapper(%p)", this);
1354
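    // This serial queue is handed to AVCF when creating the asset, player item, player, player layer, and time observer;
    // callbacks delivered on it are re-dispatched to the main thread before they touch WebCore objects.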
1355     m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0);
1356
1357 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1358     m_resourceLoaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1;
1359     m_resourceLoaderCallbacks.context = nullptr;
1360     m_resourceLoaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource;
1361 #endif
1362
1363     addToMap();
1364 }
1365
1366 AVFWrapper::~AVFWrapper()
1367 {
1368     ASSERT(isMainThread());
1369     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1370     LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);
1371
1372     destroyVideoLayer();
1373     destroyImageGenerator();
1374
1375     if (m_notificationQueue)
1376         dispatch_release(m_notificationQueue);
1377
1378     if (avAsset()) {
1379         AVCFAssetCancelLoading(avAsset());
1380         m_avAsset = 0;
1381     }
1382
1383 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1384     if (legibleOutput()) {
1385         if (avPlayerItem())
1386             AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
1387         m_legibleOutput = 0;
1388     }
1389 #endif
1390
1391     m_avPlayerItem = 0;
1392     m_timeObserver = 0;
1393     m_avPlayer = 0;
1394 }
1395
1396 Lock& AVFWrapper::mapLock()
1397 {
1398     static Lock mapLock;
1399     return mapLock;
1400 }
1401
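// AVCF callbacks receive the wrapper's object ID (not a pointer) as their context. Looking the ID up in
// this map lets a callback that races with destruction detect that the wrapper is already gone instead
// of dereferencing freed memory.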
1402 HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
1403 {
    static NeverDestroyed<HashMap<uintptr_t, AVFWrapper*>> map;
    return map;
1406 }
1407
1408 void AVFWrapper::addToMap()
1409 {
1410     LockHolder locker(mapLock());
1411     
    // HashMap does not allow a key of 0, so make sure we are not using an object ID
    // of 0 or one that is already in use.
1414     while (!m_objectID || (map().find(m_objectID) != map().end()))
1415         m_objectID = s_nextAVFWrapperObjectID++;
1416        
1417     LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);
1418
1419     map().add(m_objectID, this);
1420 }
1421
1422 void AVFWrapper::removeFromMap() const
1423 {
1424     LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);
1425
1426     LockHolder locker(mapLock());
1427     map().remove(m_objectID);
1428 }
1429
1430 AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
1431 {
1432     // Assumes caller has locked mapLock().
1433     HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
1434     if (it == map().end())
1435         return 0;
1436
1437     return it->value;
1438 }
1439
1440 void AVFWrapper::scheduleDisconnectAndDelete()
1441 {
1442     // Ignore any subsequent notifications we might receive in notificationCallback().
1443     removeFromMap();
1444
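    // Disconnect on the notification queue so the work cannot race with in-flight callbacks;
    // disconnectAndDeleteAVFWrapper() then hops back to the main thread to delete the wrapper.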
1445     dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
1446 }
1447
1448 static void destroyAVFWrapper(void* context)
1449 {
1450     ASSERT(isMainThread());
1451     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1452     if (!avfWrapper)
1453         return;
1454
1455     delete avfWrapper;
1456 }
1457
1458 void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
1459 {
1460     AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
1461
1462     LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);
1463
1464     if (avfWrapper->avPlayerItem()) {
1465         CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1466         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
1467         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
1468         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
1469         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1470         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
1471         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
1472         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
1473         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
1474         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
1475         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
1476         CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
1477     }
1478
1479     if (avfWrapper->avPlayer()) {
1480         if (avfWrapper->timeObserver())
1481             AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());
1482
1483         CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
1484     }
1485
1486 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (avfWrapper->avPlayerItem() && avfWrapper->legibleOutput())
        AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
1488 #endif
1489
1490     // We must release the AVCFPlayer and other items on the same thread that created them.
1491     dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
1492 }
1493
1494 void AVFWrapper::createAssetForURL(const URL& url, bool inheritURI)
1495 {
1496     ASSERT(!avAsset());
1497
1498     RetainPtr<CFURLRef> urlRef = url.createCFURL();
1499
1500     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1501
1502     if (inheritURI)
1503         CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);
1504
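    // Forbid references between local and remote media so a remote asset cannot reference local files, and vice versa.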
1505     const int restrictions = AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote;
1506     auto cfRestrictions = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &restrictions));
1507
1508     CFDictionarySetValue(optionsRef.get(), AVCFURLAssetReferenceRestrictionsKey, cfRestrictions.get());
1509
1510     m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue));
1511
1512 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1513     ASSERT(callbackContext());
1514     m_resourceLoaderCallbacks.context = callbackContext();
1515
1516     AVCFAssetResourceLoaderRef resourceLoader = AVCFURLAssetGetResourceLoader(m_avAsset.get());
1517     AVCFAssetResourceLoaderSetCallbacks(resourceLoader, &m_resourceLoaderCallbacks, globalLoaderDelegateQueue());
1518 #endif
1519 }
1520
1521 void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
1522 {
1523     ASSERT(isMainThread());
1524     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1525     ASSERT(avPlayerItem());
1526
1527     if (avPlayer())
1528         return;
1529
1530     RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
1531
1532     if (d3dDevice) {
        // QI for an IDirect3DDevice9Ex interface; it is required for hardware-accelerated video decoding.
1534         COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
1535         m_d3dDevice = d3dEx;
1536     } else
1537         m_d3dDevice = 0;
1538
1539     if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
1540         CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);
1541
1542 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1543     CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
1544 #endif
1545
    // FIXME: We need a way to create an AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
1547     AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
1548     m_avPlayer = adoptCF(playerRef);
1549 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1550     AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
1551 #endif
1552
1553     if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
1554         AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());
1555
1556     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1557     ASSERT(center);
1558
1559     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1560
    // Add a time observer and ask to be called infrequently: we don't really want periodic callbacks,
    // but our observer is also called whenever a seek happens, which is what we care about.
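    // CMTimeMake(value, timescale) represents value / timescale seconds, so this interval works out to roughly six months.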
1563     const double veryLongInterval = 60*60*60*24*30;
1564     m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
1565 }
1566
1567 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1568 static RetainPtr<CFArrayRef> createLegibleOutputSubtypes()
1569 {
1570     int webVTTInt = 'wvtt'; // kCMSubtitleFormatType_WebVTT;
1571     RetainPtr<CFNumberRef> webVTTNumber = adoptCF(CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &webVTTInt));
1572     CFTypeRef formatTypes[] = { webVTTNumber.get() };
1573     return adoptCF(CFArrayCreate(0, formatTypes, WTF_ARRAY_LENGTH(formatTypes), &kCFTypeArrayCallBacks));
1574 }
1575 #endif
1576
1577 void AVFWrapper::createPlayerItem()
1578 {
1579     ASSERT(isMainThread());
1580     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1581     ASSERT(avAsset());
1582
1583     if (avPlayerItem())
1584         return;
1585
1586     // Create the player item so we begin loading media data.
1587     AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
1588     m_avPlayerItem = adoptCF(itemRef);
1589
1590     CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
1591     ASSERT(center);
1592
1593     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1594     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1595     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1596     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1597     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1598     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1599     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1600     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1601     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
1602     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    // FIXME: Are there other legible output notifications we need to register for? The asset and hasEnabledAudio properties are not exposed by AVCF.
1604
1605     CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);
1606
1607 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1608     const CFTimeInterval legibleOutputAdvanceInterval = 2;
1609
1610     m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, createLegibleOutputSubtypes().get()));
1611     AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);
1612
1613     AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
1614 #if HAVE(AVCFPLAYERITEM_CALLBACK_VERSION_2)
1615     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_2;
1616 #else
1617     callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
1618 #endif
1619     ASSERT(callbackContext());
1620     callbackInfo.context = callbackContext();
1621     callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;
1622
1623     AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
1624     AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
1625     AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
1626     AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
1627 #endif
1628 }
1629
1630 void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
1631 {
1632     LockHolder locker(mapLock());
1633     AVFWrapper* self = avfWrapperForCallbackContext(context);
1634     if (!self) {
1635         LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1636         return;
1637     }
1638
1639     double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported.
1640     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
1641 }
1642
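// A NotificationCallbackData is handed to dispatch_async_f() as a raw pointer; processNotification()
// adopts it back into a std::unique_ptr on the main thread, so it is freed exactly once.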
1643 struct NotificationCallbackData {
1644     RetainPtr<CFStringRef> m_propertyName;
1645     void* m_context;
1646
1647     NotificationCallbackData(CFStringRef propertyName, void* context)
1648         : m_propertyName(propertyName), m_context(context)
1649     {
1650     }
1651 };
1652
1653 void AVFWrapper::processNotification(void* context)
1654 {
1655     ASSERT(isMainThread());
1656     ASSERT(context);
1657
1658     if (!context)
1659         return;
1660
1661     std::unique_ptr<NotificationCallbackData> notificationData { static_cast<NotificationCallbackData*>(context) };
1662
1663     LockHolder locker(mapLock());
1664     AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
1665     if (!self) {
1666         LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1667         return;
1668     }
1669
1670     CFStringRef propertyName = notificationData->m_propertyName.get();
1671
1672     if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
1673         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1674     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1675         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1676     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
1677         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1678         if (asset)
1679             self->setAsset(asset);
1680         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1681     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1682         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1683     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1684         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1685     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1686         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1687     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1688         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1689     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1690         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1691     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1692         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1693     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1694         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1695     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1696         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1697     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1698         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1699     else
1700         ASSERT_NOT_REACHED();
1701 }
1702
1703 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1704 {
1705 #if !LOG_DISABLED
    char notificationName[256] = { 0 };
    CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
    LOG(Media, "AVFWrapper::notificationCallback(id=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1709 #endif
1710
1711     auto notificationData = std::make_unique<NotificationCallbackData>(propertyName, observer);
1712
1713     dispatch_async_f(dispatch_get_main_queue(), notificationData.release(), processNotification);
1714 }
1715
1716 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1717 {
1718     LockHolder locker(mapLock());
1719     AVFWrapper* self = avfWrapperForCallbackContext(context);
1720     if (!self) {
1721         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1722         return;
1723     }
1724
1725     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1726     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1727 }
1728
1729 void AVFWrapper::checkPlayability()
1730 {
1731     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1732
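    // Built lazily on first use and intentionally never released; the array lives for the lifetime of the process.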
1733     static CFArrayRef propertyKeyName;
1734     if (!propertyKeyName) {
1735         static const CFStringRef keyNames[] = { 
1736             AVCFAssetPropertyPlayable
1737         };
1738         propertyKeyName = CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
1739     }
1740
1741     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1742 }
1743
1744 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1745 {
1746     LockHolder locker(mapLock());
1747     AVFWrapper* self = avfWrapperForCallbackContext(context);
1748     if (!self) {
1749         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1750         return;
1751     }
1752
1753     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1754     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1755 }
1756
1757 void AVFWrapper::beginLoadingMetadata()
1758 {
1759     ASSERT(avAsset());
1760     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1761     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1762 }
1763
1764 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1765 {
1766     LockHolder locker(mapLock());
1767     AVFWrapper* self = avfWrapperForCallbackContext(context);
1768     if (!self) {
1769         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1770         return;
1771     }
1772
1773     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1774     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1775 }
1776
1777 void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
1778 {
1779     ASSERT(avPlayerItem());
1780     CMTime cmTime = PAL::toCMTime(time);
1781     CMTime cmBefore = PAL::toCMTime(negativeTolerance);
1782     CMTime cmAfter = PAL::toCMTime(positiveTolerance);
1783     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1784 }
1785
1786 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
1787 struct LegibleOutputData {
1788     RetainPtr<CFArrayRef> m_attributedStrings;
1789     RetainPtr<CFArrayRef> m_samples;
1790     MediaTime m_time;
1791     void* m_context;
1792
1793     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
1794         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1795     {
1796     }
1797 };
1798
1799 void AVFWrapper::processCue(void* context)
1800 {
1801     ASSERT(isMainThread());
1802     ASSERT(context);
1803
1804     if (!context)
1805         return;
1806
    std::unique_ptr<LegibleOutputData> legibleOutputData(static_cast<LegibleOutputData*>(context));
1808
1809     LockHolder locker(mapLock());
1810     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1811     if (!self) {
1812         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1813         return;
1814     }
1815
1816     if (!self->m_currentTextTrack)
1817         return;
1818
1819     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1820 }
1821
1822 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1823 {
1824     ASSERT(!isMainThread());
1825     LockHolder locker(mapLock());
1826     AVFWrapper* self = avfWrapperForCallbackContext(context);
1827     if (!self) {
1828         LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1829         return;
1830     }
1831
1832     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1833
1834     ASSERT(legibleOutput == self->m_legibleOutput);
1835
1836     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, PAL::toMediaTime(itemTime), context);
1837
1838     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1839 }
1840 #endif
1841
1842 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1843 struct LoadRequestData {
1844     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1845     void* m_context;
1846
1847     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1848         : m_request(request), m_context(context)
1849     {
1850     }
1851 };
1852
1853 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1854 {
1855     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1856     ASSERT(context);
1857
1858     if (!context)
1859         return;
1860
    std::unique_ptr<LoadRequestData> loadRequestData(static_cast<LoadRequestData*>(context));
1862
1863     LockHolder locker(mapLock());
1864     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1865     if (!self) {
1866         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1867         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1868         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1869         return;
1870     }
1871
1872     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get())) {
1873         RetainPtr<CFErrorRef> error = adoptCF(CFErrorCreate(kCFAllocatorDefault, kCFErrorDomainCFNetwork, kCFURLErrorUnknown, nullptr));
1874         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), error.get());
1875     }
1876 }
1877
1878 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1879 {
1880 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
1881     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1882     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1883     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1884     String scheme = schemeRef.get();
1885
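    // "skd" URLs are content key requests (e.g. FairPlay Streaming key delivery); build initData for them and
    // route them through the keyNeeded() path instead of loading them like ordinary media.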
1886     if (scheme == "skd") {
1887         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1888         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1889         String keyURI = keyURIRef.get();
1890
1891         // Create an initData with the following layout:
1892         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1893         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1894         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1895         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, initDataBuffer->byteLength());
1896         initDataView->set<uint32_t>(0, keyURISize, true);
1897
1898         auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length(), 0);
1900
1901         unsigned byteLength = initDataBuffer->byteLength();
1902         auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
1903         if (!m_owner->player()->keyNeeded(initData.ptr()))
1904             return false;
1905
1906         setRequestForKey(keyURI, avRequest);
1907         return true;
1908     }
1909 #endif
1910
1911     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1912     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1913     resourceLoader->startLoading();
1914     return true;
1915 }
1916
1917 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1918 {
1919     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1920     LockHolder locker(mapLock());
1921     AVFWrapper* self = avfWrapperForCallbackContext(context);
1922     if (!self) {
1923         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1924         return false;
1925     }
1926
1927     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1928
1929     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1930
1931     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1932
1933     return true;
1934 }
1935 #endif
1936
1937 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1938 {
1939     if (asset == avAsset())
1940         return;
1941
    if (avAsset())
        AVCFAssetCancelLoading(avAsset());
1943     m_avAsset = asset;
1944 }
1945
1946 PlatformLayer* AVFWrapper::platformLayer()
1947 {
1948     ASSERT(isMainThread());
1949     if (m_videoLayerWrapper)
1950         return m_videoLayerWrapper->platformLayer();
1951
1952     if (!videoLayer())
1953         return 0;
1954
1955     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1956     m_layerClient = std::make_unique<LayerClient>(this);
1957     if (!m_layerClient)
1958         return 0;
1959
1960     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1961     if (!m_videoLayerWrapper)
1962         return 0;
1963
1964     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1965
1966     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1967     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1968     m_videoLayerWrapper->setNeedsLayout();
1969     updateVideoLayerGravity();
1970
1971     return m_videoLayerWrapper->platformLayer();
1972 }
1973
1974 void AVFWrapper::createAVCFVideoLayer()
1975 {
1976     ASSERT(isMainThread());
1977     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1978     if (!avPlayer() || m_avCFVideoLayer)
1979         return;
1980
1981     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1982     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1983     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1984 }
1985
1986 void AVFWrapper::destroyVideoLayer()
1987 {
1988     ASSERT(isMainThread());
1989     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1990     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1991     m_layerClient = nullptr;
1992     m_caVideoLayer = nullptr;
1993     m_videoLayerWrapper = nullptr;
1994     if (!m_avCFVideoLayer.get())
1995         return;
1996
1997     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), nullptr);
1998     m_avCFVideoLayer = nullptr;
1999 }
2000
2001 void AVFWrapper::setVideoLayerNeedsCommit()
2002 {
2003     if (m_videoLayerWrapper)
2004         m_videoLayerWrapper->setNeedsCommit();
2005 }
2006
2007 void AVFWrapper::setVideoLayerHidden(bool value)
2008 {
2009     if (m_videoLayerWrapper)
2010         m_videoLayerWrapper->setHidden(value);
2011 }
2012
2013 void AVFWrapper::createImageGenerator()
2014 {
2015     ASSERT(isMainThread());
2016     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
2017     if (!avAsset() || m_imageGenerator)
2018         return;
2019
2020     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
2021
2022     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
2023     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
2024     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
2025     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
2026
2027     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
2028 }
2029
2030 void AVFWrapper::destroyImageGenerator()
2031 {
2032     ASSERT(isMainThread());
2033     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
2034     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
2035     m_imageGenerator = 0;
2036 }
2037
2038 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const FloatRect& rect)
2039 {
2040     if (!m_imageGenerator)
2041         return 0;
2042
2043 #if !LOG_DISABLED
2044     MonotonicTime start = MonotonicTime::now();
2045 #endif
2046
2047     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
    RetainPtr<CGImageRef> rawImage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), PAL::toCMTime(time), 0, 0));
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
2050
2051 #if !LOG_DISABLED
2052     Seconds duration = MonotonicTime::now() - start;
2053     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration.seconds()));
2054 #endif
2055
2056     return image;
2057 }
2058
2059 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2060 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
2061 {
2062     if (!avAsset())
2063         return 0;
2064
2065     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
2066         return 0;
2067
2068     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
2069 }
2070 #endif
2071
2072 void AVFWrapper::updateVideoLayerGravity()
2073 {
2074     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
2075     // FIXME: <rdar://problem/14884340>
2076 }
2077
2078 #if HAVE(AVFOUNDATION_LOADER_DELEGATE) && ENABLE(LEGACY_ENCRYPTED_MEDIA)
2079 void AVFWrapper::setRequestForKey(const String& keyURI, AVCFAssetResourceLoadingRequestRef avRequest)
2080 {
2081     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2082     if (requestsIterator != m_keyURIToRequestMap.end()) {
2083         requestsIterator->value.append(avRequest);
2084         return;
2085     }
2086
2087     Vector<RetainPtr<AVCFAssetResourceLoadingRequestRef>> requests;
2088     requests.append(avRequest);
2089     m_keyURIToRequestMap.set(keyURI, requests);
2090 }
2091
2092 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
2093 {
2094     auto requestsIterator = m_keyURIToRequestMap.find(keyURI);
2095     if (requestsIterator == m_keyURIToRequestMap.end())
2096         return RetainPtr<AVCFAssetResourceLoadingRequestRef>();
2097
2098     auto request = requestsIterator->value.takeLast();
2099     if (requestsIterator->value.isEmpty())
2100         m_keyURIToRequestMap.take(keyURI);
2101
2102     return request;
2103 }
2104 #endif
2105
2106 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
2107 {
2108     ASSERT(isMainThread());
2109     ASSERT(m_parent);
2110     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
2111
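    // Resize and reposition the wrapped CACF video layer and the AVCF player layer to match the
    // wrapper layer that WebCore has just laid out.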
2112     CGRect bounds = wrapperLayer->bounds();
2113     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
2114     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 
2115
2116     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
2117     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
2118
2119     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2120 }
2121
2122 } // namespace WebCore
2123
2124 #else
2125 // AVFoundation should always be enabled for Apple production builds.
2126 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2127 #error AVFoundation is not enabled!
2128 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2129 #endif // USE(AVFOUNDATION)
2130 #endif // PLATFORM(WIN) && ENABLE(VIDEO)