/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaPlayerPrivateMediaStreamAVFObjC.h"

#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)

#import "AVFoundationSPI.h"
#import "AudioTrackPrivateMediaStreamCocoa.h"
#import "Clock.h"
#import "GraphicsContextCG.h"
#import "Logging.h"
#import "MediaStreamPrivate.h"
#import "MediaTimeAVFoundation.h"
#import "PixelBufferConformerCV.h"
#import "VideoTrackPrivateMediaStream.h"
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
#import <QuartzCore/CALayer.h>
#import <QuartzCore/CATransaction.h>
#import <objc/runtime.h>
#import <wtf/Function.h>
#import <wtf/MainThread.h>
#import <wtf/NeverDestroyed.h>

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
#import "VideoFullscreenLayerManager.h"
#endif

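// AVFoundation is soft-linked so WebCore does not take a hard dependency on
// the framework at load time; the SOFT_LINK_* declarations resolve the class
// and the gravity constants lazily, and the #defines below let the rest of
// the file spell them the usual way.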
#pragma mark - Soft Linking

#import "CoreMediaSoftLink.h"
#import "CoreVideoSoftLink.h"

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)

SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)

#define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
#define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
#define AVLayerVideoGravityResize getAVLayerVideoGravityResize()

using namespace WebCore;

@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end

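// Bridges KVO into the C++ player: observes the display layer's "status" and
// "error" keys and the background layer's "bounds", forwarding each change to
// _parent on the main thread. The parent calls -invalidate before it goes
// away, so the raw _parent pointer is cleared rather than left dangling.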
@implementation WebAVSampleBufferStatusChangeListener

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }
}
@end

namespace WebCore {

#pragma mark -
#pragma mark MediaPlayerPrivateMediaStreamAVFObjC

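// Headroom, in seconds, added when computing a track's timeline offset;
// presumably enough to cover enqueueing and display scheduling so the first
// sample is not already late by the time it reaches the renderer.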
static const double rendererLatency = 0.02;

MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_weakPtrFactory(this)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(Clock::create())
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
#endif
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);
}

MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);

    [m_statusChangeListener invalidate];

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}

#pragma mark -
#pragma mark MediaPlayer Factory Methods

void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}

void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}

MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    return parameters.isMediaStream ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
}

#pragma mark -
#pragma mark AVSampleBuffer Methods

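// Prune the pending queue before appending: samples whose decode time is
// already behind the stream clock will never be displayed. If decode times
// are invalid, fall back to capping the queue at a small fixed depth so it
// cannot grow without bound.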
void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

    auto decodeTime = queue.first()->decodeTime();
    if (!decodeTime.isValid() || decodeTime < MediaTime::zeroTime()) {
        while (queue.size() > 5)
            queue.removeFirst();

        return;
    }

    MediaTime now = streamTime();
    while (!queue.isEmpty()) {
        if (queue.first()->decodeTime() > now)
            break;
        queue.removeFirst();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateSampleTimes(MediaSample& sample, const MediaTime& timelineOffset, const char* loggingPrefix)
{
    LOG(MediaCaptureSamples, "%s(%p): original sample = %s", loggingPrefix, this, toString(sample).utf8().data());
    sample.offsetTimestampsBy(timelineOffset);
    LOG(MediaCaptureSamples, "%s(%p): adjusted sample = %s", loggingPrefix, this, toString(sample).utf8().data());

#if !LOG_DISABLED
    MediaTime now = streamTime();
    double delta = (sample.presentationTime() - now).toDouble();
    if (delta < 0)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is %f seconds late", loggingPrefix, this, toString(now).utf8().data(), -delta);
    else if (delta < .01)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is only %f seconds early", loggingPrefix, this, toString(now).utf8().data(), delta);
    else if (delta > .3)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is %f seconds early!", loggingPrefix, this, toString(now).utf8().data(), delta);
#else
    UNUSED_PARAM(loggingPrefix);
#endif
}

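// Maps capture timestamps onto the player's clock:
//   offset = streamTime - samplePresentationTime + latency
// e.g. a sample stamped 100.00s arriving when the stream clock reads 2.00s
// yields 2.00 - 100.00 + 0.02 = -97.98s, so offsetting the sample's
// timestamps by this amount schedules it `latency` seconds from now. The
// result is rescaled to the sample's timescale to keep later arithmetic exact.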
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = toMediaTime(CMTimeConvertScale(toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}

CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}

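// Runs layer mutations inside a zero-duration CATransaction with implicit
// actions disabled, so resizing or transforming the video layers does not
// trigger Core Animation's default animations.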
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setAnimationDuration:0];
    [CATransaction setDisableActions:YES];
    function();
    [CATransaction commit];
}

void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    ASSERT(m_videoTrackMap.contains(track.id()));

    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    if (m_displayMode != LivePreview || (m_displayMode == PausedImage && m_imagePainter.mediaSample))
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);
        LOG(MediaCaptureSamples, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample: timeline offset for track %s set to %s", track.id().utf8().data(), toString(timelineOffset).utf8().data());
    }

    updateSampleTimes(sample, timelineOffset, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample");

    if (m_sampleBufferDisplayLayer) {
        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        m_player->firstVideoFrameAvailable();
    }
}

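// Backpressure path: when the display layer reports it is not ready for more
// media data, enqueueVideoSample() parks samples in m_pendingVideoSampleQueue
// and this callback drains them once the layer becomes ready again, re-arming
// itself if the layer fills up mid-drain.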
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}

AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    UNUSED_PARAM(layer);
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(%p) - error = %s", this, [[layer.error localizedDescription] UTF8String]);
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(%p) - status = %d", this, (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (m_sampleBufferDisplayLayer)
        [m_sampleBufferDisplayLayer flush];
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}

void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        LOG_ERROR("MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers: +[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
#endif
}

void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif
}

#pragma mark -
#pragma mark MediaPlayerPrivateInterface Overrides

void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}

#if ENABLE(MEDIA_SOURCE)
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif

void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::load(%p)", this);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    // We are only doing a check on the active video track since the sole consumer of this API is canvas.
    // FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
    const auto* track = m_mediaStreamPrivate->activeVideoTrack();
    return !track || !track->isIsolated();
}

void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad(%p)", this);
    if (playing())
        pause();
}

void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay(%p)", this);
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_backgroundLayer.get();
#endif
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}

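// Display-mode policy, as encoded below: None when there is nothing to show;
// PaintItBlack when the active video track is disabled, muted, or ended, or
// when playback never started or has ended; LivePreview while playing with a
// stream that is producing data; PausedImage otherwise.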
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (playing() && !m_ended) {
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}

bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;
    m_displayMode = displayMode;

    if (m_sampleBufferDisplayLayer) {
        runWithoutAnimations([this] {
            m_sampleBufferDisplayLayer.get().hidden = m_displayMode < PausedImage;
        });
    }

    return true;
}

void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::play(%p)", this);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    m_shouldDisplayFirstVideoFrame = true;
    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::pause(%p)", this);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::setVolume(%p)", this);

    if (m_volume == volume)
        return;

    m_volume = volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::setMuted(%p)", this);

    if (muted == m_muted)
        return;

    m_muted = muted;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasVideo();
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasAudio();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    m_visible = visible;
    if (m_visible)
        flushRenderers();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    if (paused())
        return m_pausedTime;

    return streamTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (!allTracksAreLive && !m_haveSeenMetadata)
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState)
        setReadyState(newReadyState);
}

void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged)
            m_player->sizeChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
        // Do nothing.
        break;
    case RealtimeMediaSource::Type::Audio:
        break;
    case RealtimeMediaSource::Type::Video:
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] {
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS)
    for (const auto& track : m_videoTrackMap.values()) {
        if (track->streamTrack().isCaptureTrack())
            return false;
    }
#endif

    return true;
}

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
#endif

enum class TrackState {
    Add,
    Remove,
    Configure,
};

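// Generic diff between the tracks the MediaPlayer currently exposes (trackMap)
// and the tracks on the MediaStreamPrivate (currentTracks): tracks present
// only in currentTracks are created via itemFactory and added, tracks that
// have disappeared are removed, and every remaining track is re-configured
// with its recomputed index.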
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(RefT, int, TrackState)>& configureTrack)
{
    Vector<RefT> removedTracks;
    Vector<RefT> addedTracks;
    Vector<RefPtr<MediaStreamTrackPrivate>> addedPrivateTracks;

    for (const auto& track : currentTracks) {
        if (track->type() != trackType)
            continue;

        if (!trackMap.contains(track->id()))
            addedPrivateTracks.append(track);
    }

    for (const auto& track : trackMap.values()) {
        auto& streamTrack = track->streamTrack();
        if (currentTracks.contains(&streamTrack))
            continue;

        removedTracks.append(track);
        trackMap.remove(streamTrack.id());
    }

    for (auto& track : addedPrivateTracks) {
        RefT newTrack = itemFactory(*track.get());
        trackMap.add(track->id(), newTrack);
        addedTracks.append(newTrack);
    }

    int index = 0;
    for (auto& track : removedTracks)
        configureTrack(track, index++, TrackState::Remove);

    index = 0;
    for (auto& track : addedTracks)
        configureTrack(track, index++, TrackState::Add);

    index = 0;
    for (const auto& track : trackMap.values())
        configureTrack(track, index++, TrackState::Configure);
}

void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        if (oldVideoTrack != m_activeVideoTrack)
            m_imagePainter.reset();
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    Function<void(RefPtr<AudioTrackPrivateMediaStreamCocoa>, int, TrackState)> setAudioTrackState = [this](auto track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            m_player->removeAudioTrack(*track);
            break;
        case TrackState::Add:
            track->streamTrack().addObserver(*this);
            m_player->addAudioTrack(*track);
            break;
        case TrackState::Configure:
            track->setTrackIndex(index);
            bool enabled = track->streamTrack().enabled() && !track->streamTrack().muted();
            track->setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, setAudioTrackState);

    Function<void(RefPtr<VideoTrackPrivateMediaStream>, int, TrackState)> setVideoTrackState = [&](auto track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track->streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(*track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track->streamTrack().addObserver(*this);
            m_player->addVideoTrack(*track);
            break;
        case TrackState::Configure:
            track->setTrackIndex(index);
            bool selected = &track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track->setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, setVideoTrackState);
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    return std::make_unique<PlatformTimeRanges>();
}

void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    if (m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)@{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    ASSERT(m_imagePainter.pixelBufferConformer);
    if (!m_imagePainter.pixelBufferConformer)
        return;

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}

void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack || !m_imagePainter.cgImage || !m_imagePainter.mediaSample) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    FloatRect transformedDestRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}

void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player))
        ensureLayers();
    else
        destroyLayers();
}

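// The description lives in a function-local NeverDestroyed so the String is
// created once, on first use, and never runs an exit-time destructor;
// MAKE_STATIC_STRING_IMPL avoids a heap allocation by building the StringImpl
// from the literal.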
String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return description;
}

void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;
    m_readyState = readyState;
    characteristicsChanged();

    m_player->readyStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (!shouldBuffer)
        flushAndRemoveVideoSampleBuffers();
}

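// Deferred tasks capture a WeakPtr rather than a raw |this|: if the player is
// destroyed before the main-thread callback runs, the task bails out instead
// of touching freed memory.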
void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = createWeakPtr(), function = WTFMove(function)] {
        if (!weakThis)
            return;

        function();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    cgImage = nullptr;
    mediaSample = nullptr;
    pixelBufferConformer = nullptr;
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_backgroundLayer || !m_sampleBufferDisplayLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;
    if (m_videoRotation == MediaSample::VideoRotation::Right || m_videoRotation == MediaSample::VideoRotation::Left)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2 };
}

void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    scheduleDeferredTask([this] {
        runWithoutAnimations([this] {
            updateDisplayLayer();
        });
    });
}

} // namespace WebCore

#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)