cda97c91fda083def6acf5fac3ded2f448171d4c
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaStreamAVFObjC.mm
1 /*
2  * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaStreamAVFObjC.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AudioTrackPrivateMediaStreamCocoa.h"
32 #import "GraphicsContextCG.h"
33 #import "Logging.h"
34 #import "MediaStreamPrivate.h"
35 #import "PixelBufferConformerCV.h"
36 #import "VideoFullscreenLayerManagerObjC.h"
37 #import "VideoTrackPrivateMediaStream.h"
38 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
39 #import <QuartzCore/CALayer.h>
40 #import <QuartzCore/CATransaction.h>
#import <objc/runtime.h>
42 #import <pal/avfoundation/MediaTimeAVFoundation.h>
43 #import <pal/spi/mac/AVFoundationSPI.h>
44 #import <pal/system/Clock.h>
45 #import <wtf/Function.h>
46 #import <wtf/MainThread.h>
47 #import <wtf/NeverDestroyed.h>
48
49 #import "CoreVideoSoftLink.h"
50 #import <pal/cf/CoreMediaSoftLink.h>
51 #import <pal/cocoa/AVFoundationSoftLink.h>
52
53 using namespace WebCore;
54
// Observes KVO notifications on behalf of a MediaPlayerPrivateMediaStreamAVFObjC:
// "status" and "error" on its AVSampleBufferDisplayLayer, and "bounds" on its
// background CALayer. The parent owns the listener and must -invalidate it
// before being destroyed, since _parent is a raw pointer.
@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)callback;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end
64
@implementation WebAVSampleBufferStatusChangeListener

// `parent` is stored unretained; the parent is responsible for calling
// -invalidate before it goes away.
- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

// NOTE: this file builds without ARC, hence the explicit [super dealloc].
- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

// Detaches from the parent and its layers. Safe to call more than once:
// stopObservingLayers is a no-op once _parent is null.
- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

// Starts KVO on the parent's layers. Both layers must exist; must be balanced
// by -stopObservingLayers before the layers are destroyed.
- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    // Either layer may already have been torn down by the parent, so each is
    // checked individually before removing observers.
    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

// KVO callback. May be delivered on any thread; notifications destined for the
// parent are bounced to the main thread, keeping `self` alive across the hop
// with a RetainPtr and re-checking _parent once there.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    // Display-layer notifications: "status" and "error".
    if ([object isKindOfClass:PAL::getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

    // Ignore "prior" notifications; only act on the post-change value.
    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    // Background-layer notification: "bounds". Can arrive synchronously on the
    // main thread (e.g. from a layout pass), in which case avoid the dispatch.
    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            if (!_parent)
                return;

            if (isMainThread()) {
                _parent->backgroundLayerBoundsChanged();
                return;
            }

            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }

}
@end
173
174 namespace WebCore {
175 using namespace PAL;
176
177 #pragma mark -
178 #pragma mark MediaPlayerPrivateMediaStreamAVFObjC
179
180 static const double rendererLatency = 0.02;
181
// The player owns this object; `player` is stored unretained and is assumed to
// outlive it (standard MediaPlayerPrivate lifetime contract — confirm at call site).
MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(PAL::Clock::create())
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    INFO_LOG(LOGIDENTIFIER);
}

MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    INFO_LOG(LOGIDENTIFIER);

    // Invalidate the listener first so no KVO callback can reach this object
    // while it is mid-teardown.
    [m_statusChangeListener invalidate];

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    // Unregister from the stream and every track before destroying the layers.
    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}
216
217 #pragma mark -
218 #pragma mark MediaPlayer Factory Methods
219
// Registers this engine with WebCore's media-engine registry. No MIME types are
// advertised; selection happens via the isMediaStream flag in supportsType().
void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}

// Available only when AVFoundation, CoreMedia and AVSampleBufferDisplayLayer
// can all be soft-linked at runtime.
bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return PAL::AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}

void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}

// This engine handles MediaStream sources exclusively.
MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    return parameters.isMediaStream ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
}
242
243 #pragma mark -
244 #pragma mark AVSampleBuffer Methods
245
// Drops already-due samples from the head of `queue`. When the head sample has
// no usable decode timestamp, falls back to capping the backlog length instead.
void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

    auto headDecodeTime = queue.first()->decodeTime();
    bool hasUsableTimestamp = headDecodeTime.isValid() && headDecodeTime >= MediaTime::zeroTime();
    if (!hasUsableTimestamp) {
        // No timeline to compare against; just bound the queue to 5 samples.
        while (queue.size() > 5)
            queue.removeFirst();

        return;
    }

    // Remove every sample whose decode time is at or before "now" on the
    // stream clock; the first still-future sample stops the drain.
    MediaTime currentTime = streamTime();
    while (!queue.isEmpty() && queue.first()->decodeTime() <= currentTime)
        queue.removeFirst();
}
266
// Appends `sample`, first pruning samples that are already due so the pending
// queue cannot grow without bound while the layer is not ready.
void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}

// Maps a sample's presentation time onto the local stream clock, padded by
// `latency` seconds. The result is rescaled to the sample's timescale so later
// timestamp arithmetic stays exact.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = PAL::toMediaTime(CMTimeConvertScale(PAL::toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}
283
// Returns the affine transform that maps the sample's pixel buffer to display
// orientation, caching the result until rotation/mirroring changes.
CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // A degenerate buffer yields identity WITHOUT setting m_transformIsValid,
    // so the next sample recomputes the transform.
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    // Assumes VideoRotation enumerators encode degrees (0/90/180/270), hence
    // the degrees-to-radians conversion — TODO confirm against MediaSample.h.
    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}
306
// Executes `function` inside a CATransaction that suppresses implicit
// animations, so layer property changes it makes take effect immediately.
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [CATransaction setAnimationDuration:0];

    function();

    [CATransaction commit];
}
315
// Hands an already-retimed sample to the display layer, updating the layer's
// transform when the sample's orientation changed, or parking the sample in the
// pending queue when the layer is not ready for more data.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample(MediaSample& sample)
{
    if (m_sampleBufferDisplayLayer) {
        // A failed layer cannot accept samples; recovery happens elsewhere.
        if ([m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed)
            return;

        // Orientation change: recompute the transform inside a no-animation
        // transaction so the layer doesn't visibly animate the rotation.
        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    // Fires even without a display layer: "first frame" means first frame seen,
    // not first frame rendered.
    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        m_player->firstVideoFrameAvailable();
    }
}
345
// Retimes a video sample from `track` onto the local stream clock and enqueues
// it for display. Only samples from the stream's active video track are used;
// the latest sample is also cached for paint()-style rendering while paused.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    // Cache the newest sample for the paused-image path and let readiness advance.
    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    if (m_displayMode != LivePreview && !m_waitingForFirstImage)
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    // The track map can briefly lag behind the active track while tracks are
    // being updated; bail out rather than dereference a null entry.
    if (!videoTrack)
        return;

    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);

        INFO_LOG(LOGIDENTIFIER, "timeline offset for track ", track.id(), " set to ", timelineOffset);
    }

    DEBUG_LOG(LOGIDENTIFIER, "original sample = ", sample);
    sample.offsetTimestampsBy(timelineOffset);
    DEBUG_LOG(LOGIDENTIFIER, "updated sample = ", sample);

    if (WILL_LOG(WTFLogLevel::Debug)) {
        MediaTime now = streamTime();
        double delta = (sample.presentationTime() - now).toDouble();
        if (delta < 0)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", -delta, " seconds late");
        else if (delta < .01)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is only ", delta, " seconds early");
        else if (delta > .3)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", delta, " seconds early!");
    }

    enqueueCorrectedVideoSample(sample);
    if (m_waitingForFirstImage) {
        m_waitingForFirstImage = false;
        updateDisplayMode();
    }
}
391
// Asks the display layer to call back (on the main queue) once it can accept
// more samples, then drains the pending queue. The block only checks weakThis;
// member accesses inside it are safe because the guard returns early when this
// object has been destroyed.
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    auto weakThis = makeWeakPtr(*this);
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        if (!weakThis)
            return;

        // One-shot: stop the callbacks and re-arm below if the queue can't be
        // fully drained.
        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        // No active video track means the queued samples are stale; discard them.
        if (!m_activeVideoTrack) {
            m_pendingVideoSampleQueue.clear();
            return;
        }

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}
417
// No Web Audio source is exposed for MediaStream playback yet.
AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}

// Called on the main thread when KVO reports the display layer's "error" key
// changed; currently only logs the error description.
void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    UNUSED_PARAM(layer);
    ERROR_LOG(LOGIDENTIFIER, "error = ", [[layer.error localizedDescription] UTF8String]);
}
429
// Called on the main thread when the display layer's "status" changes. Once the
// layer starts rendering, the active track's timeline offset is invalidated so
// the next sample recomputes it against the layer's notion of time.
void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    ALWAYS_LOG(LOGIDENTIFIER, "status = ", (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    // Ignore notifications from a stale layer or when there is no active track.
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}
443
// Backgrounding can leave the display layer in the Failed state; on foreground
// re-activation, flush it and re-enqueue the last cached frame to recover.
void MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive()
{
    if (m_sampleBufferDisplayLayer && [m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed) {
        flushRenderers();
        if (m_imagePainter.mediaSample)
            enqueueCorrectedVideoSample(*m_imagePainter.mediaSample);
        updateDisplayMode();
    }
}

// Discards queued-but-unrendered samples from the display layer.
void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (m_sampleBufferDisplayLayer)
        [m_sampleBufferDisplayLayer flush];
}

// Also removes the currently displayed image. Messaging nil is a no-op, so no
// guard is needed here.
void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}
464
// Lazily creates the AVSampleBufferDisplayLayer and its black background
// CALayer, wires up KVO via the status-change listener, and hands the
// background layer to the fullscreen layer manager. No-op when layers already
// exist or there is no enabled active video track.
void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    // Soft-linked class: allocation can fail when AVFoundation is unavailable.
    m_sampleBufferDisplayLayer = adoptNS([PAL::allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        ERROR_LOG(LOGIDENTIFIER, "+[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().hidden = hideBackgroundLayer();

    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    // Size the background to the element's content box.
    auto size = snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size();
    m_backgroundLayer.get().bounds = CGRectMake(0, 0, size.width(), size.height());

    // Observe both layers (status/error on display, bounds on background).
    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), size);
}
507
// Tears down both layers. KVO must stop before the layers are released, and
// pending samples are dropped since no layer remains to render them.
void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();
    
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
}
523
524 #pragma mark -
525 #pragma mark MediaPlayerPrivateInterface Overrides
526
// URL-based loading is unsupported: report a format error asynchronously so the
// registry falls through to another engine.
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}

#if ENABLE(MEDIA_SOURCE)
// MSE loading is likewise unsupported by this engine.
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif

// The supported path: attach to a MediaStreamPrivate, observe it, and defer
// track/readiness updates to a scheduled task.
void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    INFO_LOG(LOGIDENTIFIER);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}
561
// We only check the active video track since the sole consumer of this API is
// canvas: an isolated (cross-origin) track fails the check.
// FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    if (const auto* activeTrack = m_mediaStreamPrivate->activeVideoTrack())
        return !activeTrack->isIsolated();
    return true;
}
569
// Cancelling a load just stops playback; there is no network activity to abort.
void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    if (playing())
        pause();
}

// Nothing to prepare; samples arrive via the stream observers.
void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    INFO_LOG(LOGIDENTIFIER);
}

// The layer WebCore composites inline; null until layers exist and something
// is displayable.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

    return m_videoFullscreenLayerManager->videoInlineLayer();
}

// Accessor used by the KVO listener.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}

// Accessor used by the KVO listener.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}
599
// Derives what should currently be rendered from stream, track and playback
// state. Branch order matters: earlier conditions take precedence.
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    // Nothing to show until metadata, an intrinsic size and a layer exist.
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    // A disabled, muted or ended active video track renders as black.
    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (m_waitingForFirstImage)
        return WaitingForFirstImage;

    if (playing() && !m_ended) {
        // Playing but the stream stopped producing: keep the last frame.
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}
624
// Recomputes the display mode and toggles layer visibility to match. Returns
// true when the mode actually changed.
bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;

    INFO_LOG(LOGIDENTIFIER, "updated to ", static_cast<int>(displayMode));
    m_displayMode = displayMode;

    // Modes below PausedImage (None, PaintItBlack, WaitingForFirstImage) hide
    // the video layer; visibility flips inside no-animation transactions.
    auto hidden = m_displayMode < PausedImage;
    if (m_sampleBufferDisplayLayer && m_sampleBufferDisplayLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_sampleBufferDisplayLayer.get().hidden = hidden;
        });
    }
    hidden = hideBackgroundLayer();
    if (m_backgroundLayer && m_backgroundLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_backgroundLayer.get().hidden = hidden;
        });
    }

    return true;
}
650
// Starts playback: runs the stream clock, resumes audio renderers, and defers
// the readiness/rate notifications to a scheduled task.
void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    // The clock keeps running across pause/play; only start it once.
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}

// Pauses playback: freezes currentMediaTime at the pause point, pauses audio
// renderers and flushes any queued video.
void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    // Capture the position before flipping state; currentMediaTime() reads the
    // live clock only while playing.
    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}
695
// Stores the player volume and pushes it to every audio renderer. Mute wins:
// while muted, renderers receive 0 regardless of the stored volume.
void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    if (volume == m_volume)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);
    m_volume = volume;

    float effectiveVolume = m_muted ? 0 : m_volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(effectiveVolume);
}

// Toggles mute without losing the stored volume; renderers are driven to 0
// while muted and back to m_volume when unmuted.
void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);
    m_muted = muted;

    float effectiveVolume = m_muted ? 0 : m_volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(effectiveVolume);
}
717
// True when stream metadata is available and the stream carries video.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasVideo();
}

// True when stream metadata is available and the stream carries audio.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasAudio();
}

// Tracks element visibility; on becoming visible, flush so stale queued frames
// are not rendered.
void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (visible == m_visible)
        return;

    m_visible = visible;
    if (visible)
        flushRenderers();
}
743
// A MediaStream has no fixed duration.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}

// Playback position: frozen at the pause point while paused, otherwise the
// live stream clock.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    return paused() ? m_pausedTime : streamTime();
}

// The local clock against which incoming sample timestamps are retimed.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}
771
// Computes the ready state from stream/track liveness and whether a first
// video frame has been captured yet.
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        // The active video track is not "ready" until a sample has been cached,
        // unless metadata was already seen previously.
        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata || m_waitingForFirstImage)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (m_waitingForFirstImage || (!allTracksAreLive && !m_haveSeenMetadata))
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}
794
// Recomputes the ready state and notifies only on an actual change.
void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState) {
        ALWAYS_LOG(LOGIDENTIFIER, "updated to ", (int)newReadyState);
        setReadyState(newReadyState);
    }
}
804
// MediaStreamPrivate observer: the stream's active flag changed. Handled in a
// deferred task; a now-inactive stream pauses playback and signals "ended".
void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}
824
// If the display mode changed, invalidate the cached video transform and tell
// the client the rendering mode changed (deferred to avoid reentrancy).
void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });

}
837
// MediaStreamPrivate observer: track characteristics changed. Picks up a new
// intrinsic size, refreshes tracks/display mode, and defers player
// notifications.
void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
        // First real size seen moves us out of the pre-metadata state.
        if (m_playbackState == PlaybackState::None)
            m_playbackState = PlaybackState::Paused;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged) {
            m_player->sizeChanged();
        }
    });
}
865
// MediaStreamPrivate observers: track membership changed; rebuild track maps.
void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}
875
// Track observer: a new sample arrived. Video samples from the active track are
// forwarded to the enqueue path; audio is rendered elsewhere (by the audio
// track renderers), so it is ignored here.
void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    // Samples arriving before the stream clock starts are dropped.
    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
        // Do nothing.
        break;
    case RealtimeMediaSource::Type::Audio:
        break;
    case RealtimeMediaSource::Type::Video:
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}
897
// Track observer: a track's ready state changed; recompute ours (deferred).
void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] {
        updateReadyState();
    });
}

// Picture-in-Picture is disallowed for camera-capture tracks on iOS-family
// platforms; allowed everywhere else.
bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS_FAMILY)
    for (const auto& track : m_videoTrackMap.values()) {
        if (track->streamTrack().isCaptureTrack())
            return false;
    }
#endif
    
    return true;
}
916
// Installs (or clears) the fullscreen host layer. The current frame image is
// refreshed first so the fullscreen layer can show it immediately rather than
// a blank frame.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateCurrentFrameImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_imagePainter.cgImage);
}
922
// Forwards the new fullscreen frame rect to the fullscreen layer manager.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
927
// Phase passed to the configureTrack callback of updateTracksOfType().
// A scoped enum keeps the very generic names Add/Remove/Configure from
// leaking into the enclosing namespace; all uses are already qualified
// (TrackState::Add, etc.), so this is a drop-in replacement for the old
// C-style typedef enum.
enum class TrackState {
    Add,
    Remove,
    Configure,
};
933
// Reconciles trackMap (our per-type wrappers, keyed by track id) against
// currentTracks, the authoritative track list from the stream:
//   1. wrappers are created (via itemFactory) for tracks of trackType that
//      are not yet in the map,
//   2. wrappers whose underlying track left the stream are dropped,
//   3. configureTrack is invoked for each removed wrapper, then each added
//      wrapper, then every wrapper remaining in the map, so the caller can
//      update observers and MediaPlayer-side track lists.
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(typename RefT::ValueType&, int, TrackState)>& configureTrack)
{
    Vector<RefT> removedTracks;
    Vector<RefT> addedTracks;
    Vector<RefPtr<MediaStreamTrackPrivate>> addedPrivateTracks;

    // Collect stream tracks of the requested type we have no wrapper for yet.
    for (const auto& track : currentTracks) {
        if (track->type() != trackType)
            continue;

        if (!trackMap.contains(track->id()))
            addedPrivateTracks.append(track);
    }

    // Collect wrappers whose underlying track is no longer in the stream.
    for (const auto& track : trackMap.values()) {
        auto& streamTrack = track->streamTrack();
        if (currentTracks.contains(&streamTrack))
            continue;

        removedTracks.append(track);
    }
    // Remove them from the map before any callbacks run; removedTracks keeps
    // the wrappers alive for the Remove notifications below.
    for (auto& track : removedTracks)
        trackMap.remove(track->streamTrack().id());

    // Create wrappers for newly-added tracks via the supplied factory.
    for (auto& track : addedPrivateTracks) {
        RefT newTrack = itemFactory(*track.get());
        trackMap.add(track->id(), newTrack);
        addedTracks.append(newTrack);
    }

    // Notification phases: removals, then additions, then (re)configuration
    // of everything still in the map. The index restarts at 0 per phase.
    int index = 0;
    for (auto& track : removedTracks)
        configureTrack(*track, index++, TrackState::Remove);

    index = 0;
    for (auto& track : addedTracks)
        configureTrack(*track, index++, TrackState::Add);

    index = 0;
    for (const auto& track : trackMap.values())
        configureTrack(*track, index++, TrackState::Configure);
}
977
// Re-evaluates which video track is active and updates layer visibility to
// match. Calls are coalesced: while a check is pending
// (m_pendingSelectedTrackCheck) further requests are no-ops, and the actual
// work runs later as a deferred task on the main thread.
void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        // Adopt the stream's active video track only if we have a wrapper for
        // it; the video layer stays hidden unless that wrapper is selected.
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        // Switching tracks invalidates the cached frame image; if nothing is
        // being displayed yet, wait for the first image from the new track.
        if (oldVideoTrack != m_activeVideoTrack) {
            m_imagePainter.reset();
            if (m_displayMode == None)
                m_waitingForFirstImage = true;
        }
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_backgroundLayer.get().hidden = hideBackgroundLayer();

        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}
1012
// Synchronizes our audio and video track wrappers with the tracks currently
// in m_mediaStreamPrivate, adding/removing observers and MediaPlayer track
// entries as tracks come and go.
void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    auto setAudioTrackState = [this](AudioTrackPrivateMediaStreamCocoa& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeAudioTrack(track);
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addAudioTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // An audio track renders only when it is enabled and not muted.
            bool enabled = track.streamTrack().enabled() && !track.streamTrack().muted();
            track.setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, WTFMove(setAudioTrackState));

    auto setVideoTrackState = [this](VideoTrackPrivateMediaStream& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addVideoTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // Selection follows the stream's notion of the active video track.
            bool selected = &track.streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track.setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, WTFMove(setVideoTrackState));
}
1059
// A live MediaStream cannot be seeked, so the seekable range is always empty.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>();
}
1064
// A live MediaStream has no buffered media, so report an empty range set.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    return std::make_unique<PlatformTimeRanges>();
}
1069
// Painting a stream always draws its most recent frame; there is no notion
// of a paused timeline position to paint.
void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
1074
// Converts the most recent media sample into a CGImage cached in
// m_imagePainter.cgImage, for software painting and fullscreen snapshots.
void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    // Nothing to do when the cached image is already current, or when there
    // is no sample to convert.
    if (m_imagePainter.cgImage)
        return;
    if (!m_imagePainter.mediaSample)
        return;

    // Lazily create the conformer used to convert pixel buffers to 32BGRA.
    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)@{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    ASSERT(m_imagePainter.pixelBufferConformer);
    if (!m_imagePainter.pixelBufferConformer)
        return;

    auto sampleBuffer = m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer;
    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}
1090
// Draws the current video frame (or solid black) into `context` at
// `destRect`, applying the sample's rotation/mirroring transform.
void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    // Refresh the cached CGImage from the latest sample before drawing.
    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

    // Nothing to draw if no sample has arrived yet or conversion failed.
    if (!m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    // Concatenate the video transform onto the CTM, and map destRect through
    // its inverse so the frame lands in destRect after transformation.
    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    FloatRect transformedDestRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}
1115
// Builds the layer tree when compositing is available for this player and
// tears it down otherwise.
void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    if (!m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player)) {
        destroyLayers();
        return;
    }

    ensureLayers();
}
1123
// Human-readable name of this media engine, used for diagnostics.
String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return description;
}
1129
// Updates the cached ready state and notifies the player, ignoring no-op
// transitions.
void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    // Advancing past HaveNothing means metadata has been seen at least once.
    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;

    m_readyState = readyState;
    characteristicsChanged();
    m_player->readyStateChanged();
}
1142
// Updates the cached network state and notifies the player only when the
// state actually changes.
void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (networkState == m_networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}
1151
// When buffering is turned off, drop any queued video samples; turning it on
// requires no action since samples arrive continuously from the stream.
void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (shouldBuffer)
        return;

    flushAndRemoveVideoSampleBuffers();
}
1157
// Runs `function` asynchronously on the main thread. The weak capture means
// the task is silently dropped if this player is destroyed before it runs.
void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = makeWeakPtr(*this), function = WTFMove(function)] {
        if (!weakThis)
            return;

        function();
    });
}
1168
// Drops all cached painting state so the next paint rebuilds the image from
// a fresh media sample.
void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    pixelBufferConformer = nullptr;
    mediaSample = nullptr;
    cgImage = nullptr;
}
1175
// Sizes the sample-buffer display layer to fill the background layer,
// swapping width and height for quarter-turn rotations so the rotated video
// still fits, and keeps it centered.
void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_backgroundLayer || !m_sampleBufferDisplayLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;

    bool isQuarterTurn = m_videoRotation == MediaSample::VideoRotation::Right || m_videoRotation == MediaSample::VideoRotation::Left;
    if (isQuarterTurn)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2};
}
1189
// The background layer was resized; resize/re-center the display layer with
// implicit animations suppressed so the change is not animated.
void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    runWithoutAnimations([this] {
        updateDisplayLayer();
    });
}
1196
#if !RELEASE_LOG_DISABLED
// All release logging from this class goes to the Media log channel.
WTFLogChannel& MediaPlayerPrivateMediaStreamAVFObjC::logChannel() const
{
    return LogMedia;
}
#endif
1203
1204 }
1205
1206 #endif