[MediaStream] A stream's first video frame should be rendered
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaStreamAVFObjC.mm
1 /*
2  * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaStreamAVFObjC.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AudioTrackPrivateMediaStreamCocoa.h"
32 #import "GraphicsContextCG.h"
33 #import "Logging.h"
34 #import "MediaStreamPrivate.h"
35 #import "PixelBufferConformerCV.h"
36 #import "VideoFullscreenLayerManagerObjC.h"
37 #import "VideoTrackPrivateMediaStream.h"
38 #import <AVFoundation/AVSampleBufferDisplayLayer.h>
39 #import <QuartzCore/CALayer.h>
40 #import <QuartzCore/CATransaction.h>
41 #import <objc_runtime.h>
42 #import <pal/avfoundation/MediaTimeAVFoundation.h>
43 #import <pal/spi/mac/AVFoundationSPI.h>
44 #import <pal/system/Clock.h>
45 #import <wtf/Function.h>
46 #import <wtf/MainThread.h>
47 #import <wtf/NeverDestroyed.h>
48
49
50 #pragma mark - Soft Linking
51
52 #import <pal/cf/CoreMediaSoftLink.h>
53 #import "CoreVideoSoftLink.h"
54
55 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
56
57 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
58
59 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
60 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
61 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
62
63 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
64 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
65 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
66
67 using namespace WebCore;
68
// Observes KVO changes ("status"/"error" on the AVSampleBufferDisplayLayer,
// "bounds" on the background layer) on behalf of the player. The player owns
// this listener and must -invalidate it before being destroyed; _parent is a
// raw back-pointer, deliberately not retained.
@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)callback;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end
78
@implementation WebAVSampleBufferStatusChangeListener

// _parent is a raw pointer; the player guarantees it outlives this listener
// by calling -invalidate from its destructor.
- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

- (void)dealloc
{
    // Tear down KVO registrations; otherwise the observed layers would
    // message a deallocated observer.
    [self invalidate];
    [super dealloc];
}

- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    // "status"/"error" track the display layer's rendering health; "bounds"
    // on the background layer drives re-layout of the video layer.
    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    // Only remove observers from layers that still exist; adds and removes
    // must stay balanced (see -beginObservingLayers).
    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

// KVO callback; may arrive on a non-main thread, so parent notifications are
// bounced to the main thread. protectedSelf keeps the listener (and thus the
// _parent null check) valid across the hop.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

    // Ignore "prior" notifications; only react to the final value change.
    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            if (!_parent)
                return;

            // Bounds changes already on the main thread are handled synchronously.
            if (isMainThread()) {
                _parent->backgroundLayerBoundsChanged();
                return;
            }

            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }

}
@end
187
188 namespace WebCore {
189 using namespace PAL;
190
191 #pragma mark -
192 #pragma mark MediaPlayerPrivateMediaStreamAVFObjC
193
194 static const double rendererLatency = 0.02;
195
// The player owns the KVO listener, a monotonic clock used as the stream
// timeline, and the fullscreen/inline layer manager.
MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(PAL::Clock::create())
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    INFO_LOG(LOGIDENTIFIER);
}
208
MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    INFO_LOG(LOGIDENTIFIER);

    // Invalidate first so KVO callbacks cannot reach a half-destroyed player.
    [m_statusChangeListener invalidate];

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    // Unregister from the stream and from every track before tearing down layers.
    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}
230
231 #pragma mark -
232 #pragma mark MediaPlayer Factory Methods
233
// Registers this engine with the MediaPlayer factory; only done when the
// required AVFoundation/CoreMedia symbols are soft-linkable at runtime.
void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}
240
// True when the soft-linked frameworks and the AVSampleBufferDisplayLayer
// class are all present on this system.
bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}
245
// MediaStream sources are not identified by MIME type, so the engine
// advertises none; supportsType() keys off isMediaStream instead.
void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}
251
// This engine handles exactly the MediaStream case; everything else is left
// to the other registered media engines.
MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    if (parameters.isMediaStream)
        return MediaPlayer::IsSupported;

    return MediaPlayer::IsNotSupported;
}
256
257 #pragma mark -
258 #pragma mark AVSampleBuffer Methods
259
// Drop samples from a pending queue that are already due. When decode times
// are valid, everything at or before the current stream time is removed;
// when they are unusable, the queue is merely capped at five entries.
void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

    auto firstDecodeTime = queue.first()->decodeTime();
    bool decodeTimeIsUsable = firstDecodeTime.isValid() && firstDecodeTime >= MediaTime::zeroTime();
    if (!decodeTimeIsUsable) {
        // No usable timestamps: just keep the queue from growing without bound.
        while (queue.size() > 5)
            queue.removeFirst();

        return;
    }

    MediaTime currentTime = streamTime();
    while (!queue.isEmpty() && queue.first()->decodeTime() <= currentTime)
        queue.removeFirst();
}
280
// Appends a sample after pruning entries that are already due, so the queue
// holds only samples that can still be rendered.
void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}
286
// Computes the offset that maps a sample's capture timestamps onto the
// player's stream clock, padded by the renderer latency. The result is
// rescaled to the sample's timescale so later timestamp arithmetic is exact.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    // Fall back to the plain presentation time when the output time is unset/invalid.
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = PAL::toMediaTime(CMTimeConvertScale(PAL::toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}
297
// Returns the affine transform matching the current rotation/mirroring state,
// caching the result until invalidated (forceUpdate or m_transformIsValid cleared).
CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // A degenerate buffer yields the identity transform and is not cached.
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    // m_videoRotation's numeric value is treated as degrees here; convert to radians.
    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}
320
// Runs the given mutation inside a CATransaction with implicit animations
// disabled, so layer property changes take effect immediately.
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setAnimationDuration:0];
    [CATransaction setDisableActions:YES];
    function();
    [CATransaction commit];
}
329
// Pushes an already timeline-corrected sample to the display layer, queueing
// it when the layer cannot accept more data, and notifies the player the
// first time any frame is enqueued so the first video frame gets rendered.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample(MediaSample& sample)
{
    if (m_sampleBufferDisplayLayer) {
        // A failed layer will not render until it is rebuilt; drop the sample.
        if ([m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed)
            return;

        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            // Apply the new transform without implicit animations so the frame
            // does not visibly rotate into place.
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        // Null-check m_player for consistency with every other notification
        // site in this file; samples can still arrive during teardown.
        if (m_player)
            m_player->firstVideoFrameAvailable();
    }
}
359
// Routes a video sample from the active track into the renderer: remembers it
// for software painting, establishes the track's timeline offset on first
// use, re-stamps the sample onto the player timeline, and enqueues it.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    ASSERT(m_videoTrackMap.contains(track.id()));

    // Only the active video track feeds the display.
    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    // Keep the most recent sample so paint-based consumers can draw it; the
    // CGImage cache is invalidated because it no longer matches the sample.
    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    // Still enqueue while waiting for the first image even if not in live preview.
    if (m_displayMode != LivePreview && !m_waitingForFirstImage)
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);

        INFO_LOG(LOGIDENTIFIER, "timeline offset for track ", track.id(), " set to ", timelineOffset);
    }

    DEBUG_LOG(LOGIDENTIFIER, "original sample = ", toString(sample));
    sample.offsetTimestampsBy(timelineOffset);
    DEBUG_LOG(LOGIDENTIFIER, "updated sample = ", toString(sample));

    // Debug-only sanity logging of how early/late the corrected sample is.
    if (WILL_LOG(WTFLogLevelDebug)) {
        MediaTime now = streamTime();
        double delta = (sample.presentationTime() - now).toDouble();
        if (delta < 0)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* sample at time is ", now, " is", -delta, " seconds late");
        else if (delta < .01)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* audio sample at time ", now, " is only ", delta, " seconds early");
        else if (delta > .3)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* audio sample at time ", now, " is ", delta, " seconds early!");
    }

    enqueueCorrectedVideoSample(sample);
    // First frame seen: leave the waiting state and recompute the display mode.
    if (m_waitingForFirstImage) {
        m_waitingForFirstImage = false;
        updateDisplayMode();
    }
}
407
// Asks the display layer to call back on the main queue once it can accept
// more data, then drains the pending sample queue into it.
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    auto weakThis = makeWeakPtr(*this);
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        // The member accesses below capture |this| raw; the weak pointer check
        // guards against the player having been destroyed in the meantime.
        if (!weakThis)
            return;

        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        // The active track can be cleared while samples are still pending;
        // dereferencing a null RefPtr below would crash, so drop the stale
        // samples instead.
        if (!m_activeVideoTrack) {
            m_pendingVideoSampleQueue.clear();
            return;
        }

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}
428
// Web Audio integration point; not implemented for MediaStream playback yet.
AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}
434
// Called (on the main thread) when KVO reports a change of the display
// layer's "error" property. UNUSED_PARAM covers release builds where the
// logging below compiles away.
void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    UNUSED_PARAM(layer);
    ERROR_LOG(LOGIDENTIFIER, "error = ", [[layer.error localizedDescription] UTF8String]);
}
440
// Called (on the main thread) when the display layer's "status" changes.
// Once the layer starts rendering, the active track's timeline offset is
// reset so it is recomputed against the layer's fresh timebase.
void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    ALWAYS_LOG(LOGIDENTIFIER, "status = ", (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    // Ignore notifications from a stale layer or when there is no active track.
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}
454
// The display layer can enter the Failed state while the app is in the
// background; on reactivation, flush it and re-enqueue the last sample to
// recover the picture.
void MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive()
{
    if (m_sampleBufferDisplayLayer && [m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed) {
        flushRenderers();
        if (m_imagePainter.mediaSample)
            enqueueCorrectedVideoSample(*m_imagePainter.mediaSample);
        updateDisplayMode();
    }
}
464
// Discards any queued-but-unrendered samples held by the display layer.
void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    [m_sampleBufferDisplayLayer flush];
}
470
// Flushes the layer and also removes the currently displayed image.
// Messaging a nil layer is a harmless no-op.
void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}
475
// Lazily builds the layer tree: a black background CALayer hosting an
// AVSampleBufferDisplayLayer. Only done once there is an enabled active
// video track. The setup order matters: layers are configured before KVO
// observation begins and before the fullscreen manager takes the layer.
void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        ERROR_LOG(LOGIDENTIFIER, "+[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().hidden = hideBackgroundLayer();

    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    // Size the background layer to the element's current content box.
    auto size = snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size();
    m_backgroundLayer.get().bounds = CGRectMake(0, 0, size.width(), size.height());

    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), size);
}
518
// Tears down the layer tree built by ensureLayers(). KVO observation stops
// first so the listener never sees a half-destroyed layer.
void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        // Pending samples are meaningless once the layer is gone.
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();
    
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
}
534
535 #pragma mark -
536 #pragma mark MediaPlayerPrivateInterface Overrides
537
// URL-based loading is unsupported; report a format error asynchronously.
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
545
#if ENABLE(MEDIA_SOURCE)
// MSE-based loading is likewise unsupported by this engine.
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif
555
// The supported load path: attaches to a MediaStreamPrivate, starts
// observing it, then finishes setup asynchronously on the main run loop.
void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    INFO_LOG(LOGIDENTIFIER);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    // An inactive stream means playback has already ended.
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}
572
// Reports taint status for canvas consumers: an isolated active video track
// must not be readable cross-origin.
bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    // We are only doing a check on the active video track since the sole consumer of this API is canvas.
    // FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
    const auto* track = m_mediaStreamPrivate->activeVideoTrack();
    return !track || !track->isIsolated();
}
580
// There is no network fetch to cancel; just stop playback if running.
void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    if (playing())
        pause();
}
587
// Nothing to pre-buffer for a live stream; kept for interface completeness.
void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    INFO_LOG(LOGIDENTIFIER);
}
592
// The layer to composite inline; nullptr while there is nothing to show.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

    return m_videoFullscreenLayerManager->videoInlineLayer();
}
600
// Accessor used by the KVO listener; may be null before ensureLayers().
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}
605
// Accessor used by the KVO listener; may be null before ensureLayers().
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}
610
// Derives the display mode from current state. Precedence: nothing to show →
// disabled/muted/ended track paints black → waiting for the first frame →
// playing (live or paused image) → not-yet-started/ended paints black.
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (m_waitingForFirstImage)
        return WaitingForFirstImage;

    if (playing() && !m_ended) {
        // Sources that stopped producing data show the last frame instead of live video.
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}
635
// Recomputes the display mode and toggles layer visibility accordingly.
// Returns true only when the mode actually changed.
bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;

    INFO_LOG(LOGIDENTIFIER, "updated to ", static_cast<int>(displayMode));
    m_displayMode = displayMode;

    // The video layer is hidden for modes below PausedImage (None, PaintItBlack, …).
    auto hidden = m_displayMode < PausedImage;
    if (m_sampleBufferDisplayLayer && m_sampleBufferDisplayLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_sampleBufferDisplayLayer.get().hidden = hidden;
        });
    }
    hidden = hideBackgroundLayer();
    if (m_backgroundLayer && m_backgroundLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_backgroundLayer.get().hidden = hidden;
        });
    }

    return true;
}
661
// Starts playback: runs the stream clock, resumes audio renderers, and
// defers the rate-change notification so callers see a consistent state.
void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    // The clock is started once and keeps running across pause/play cycles.
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}
684
// Pauses playback: captures the pause time (reported by currentMediaTime()
// while paused), stops audio renderers, and flushes pending video.
void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}
706
// Records the requested volume and pushes it to every audio renderer.
// While muted the audible volume stays zero, but the request is remembered
// so unmuting can restore it.
void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    if (volume == m_volume)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);
    m_volume = volume;
    float effectiveVolume = m_muted ? 0 : m_volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(effectiveVolume);
}
717
// Toggles mute by driving every audio renderer's volume to zero or back to
// the remembered volume.
void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);
    m_muted = muted;
    float effectiveVolume = m_muted ? 0 : m_volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(effectiveVolume);
}
728
// True once metadata is available and the stream carries a video track.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasVideo();
}
736
// True once metadata is available and the stream carries an audio track.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasAudio();
}
744
// Tracks element visibility; on becoming visible, flush the renderer so a
// stale frame is not displayed.
void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (visible == m_visible)
        return;

    m_visible = visible;
    if (visible)
        flushRenderers();
}
754
// A live stream has no fixed duration.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}
759
// While paused, report the time playback stopped; otherwise follow the
// stream clock.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    return paused() ? m_pausedTime : streamTime();
}
767
// The stream timeline: elapsed time of the player's monotonic clock.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}
772
// Cached network state, updated via setNetworkState().
MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}
777
// Cached ready state, updated via updateReadyState()/setReadyState().
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}
782
// Computes the ready state from the stream: HaveNothing without an active
// stream (or before the first video frame), HaveMetadata while tracks are
// not all live, HaveEnoughData otherwise.
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        // The active video track contributes nothing until a sample has arrived.
        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata || m_waitingForFirstImage)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (m_waitingForFirstImage || (!allTracksAreLive && !m_haveSeenMetadata))
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}
805
// Propagates a recomputed ready state, but only when it actually changed.
void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState) {
        ALWAYS_LOG(LOGIDENTIFIER, "updated to ", (int)newReadyState);
        setReadyState(newReadyState);
    }
}
815
// MediaStreamPrivate observer: reacts to the stream going (in)active,
// deferred to the main run loop. An inactive stream ends playback.
void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}
835
// When the display mode changed, invalidates the cached video transform and
// tells the client the rendering mode changed (deferred to the main run loop).
void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });

}
848
// MediaStreamPrivate observer: picks up intrinsic-size changes, refreshes
// tracks and display mode, and defers client notifications.
void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
        // A first real size moves the player out of the pre-start state.
        if (m_playbackState == PlaybackState::None)
            m_playbackState = PlaybackState::Paused;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged) {
            m_player->sizeChanged();
        }
    });
}
876
// MediaStreamPrivate observer: a track was added; resync the track maps.
void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}
881
// MediaStreamPrivate observer: a track was removed; resync the track maps.
void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}
886
// Per-track sample callback. Only samples from the active video track are
// enqueued; audio is rendered by the AudioTrackPrivate objects themselves.
void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    // Ignore samples arriving before the stream clock has started.
    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
        // Do nothing.
        break;
    case RealtimeMediaSource::Type::Audio:
        break;
    case RealtimeMediaSource::Type::Video:
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}
908
// A track's ready state changed; recompute the player's ready state on the
// main thread.
void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] { updateReadyState(); });
}
915
// Picture-in-Picture is supported unless, on iOS-family platforms, one of the
// video tracks is a live capture track.
bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS_FAMILY)
    for (const auto& videoTrack : m_videoTrackMap.values()) {
        if (videoTrack->streamTrack().isCaptureTrack())
            return false;
    }
#endif

    return true;
}
927
// Installs (or removes) the fullscreen host layer. The current frame is
// converted to a CGImage first so the layer manager can hand it to the
// fullscreen layer.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateCurrentFrameImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_imagePainter.cgImage);
}
933
// Forwards the fullscreen frame rect to the layer manager.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
938
// Phase of a track-map update pass (see updateTracksOfType). A scoped enum:
// the unscoped `typedef enum` it replaces leaked the generic enumerator names
// Add/Remove/Configure into the enclosing namespace. All existing uses are
// already qualified (TrackState::Add, ...), so this is source-compatible.
enum class TrackState {
    Add,
    Remove,
    Configure,
};
944
// Synchronizes a track map (track id -> platform track object) with the
// current set of MediaStreamTrackPrivate tracks of the given type. New tracks
// are created via itemFactory and inserted; tracks no longer present in
// currentTracks are dropped from the map. configureTrack is then invoked in
// three passes — removed tracks (TrackState::Remove), added tracks
// (TrackState::Add), and finally every surviving track
// (TrackState::Configure) — each pass with its own zero-based index.
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(typename RefT::ValueType&, int, TrackState)>& configureTrack)
{
    Vector<RefT> removedTracks;
    Vector<RefT> addedTracks;
    Vector<RefPtr<MediaStreamTrackPrivate>> addedPrivateTracks;

    // Collect tracks of the requested type that have no map entry yet.
    for (const auto& track : currentTracks) {
        if (track->type() != trackType)
            continue;

        if (!trackMap.contains(track->id()))
            addedPrivateTracks.append(track);
    }

    // Collect map entries whose underlying stream track is gone.
    for (const auto& track : trackMap.values()) {
        auto& streamTrack = track->streamTrack();
        if (currentTracks.contains(&streamTrack))
            continue;

        removedTracks.append(track);
    }
    for (auto& track : removedTracks)
        trackMap.remove(track->streamTrack().id());

    // Create platform track objects for the newly-seen stream tracks.
    for (auto& track : addedPrivateTracks) {
        RefT newTrack = itemFactory(*track.get());
        trackMap.add(track->id(), newTrack);
        addedTracks.append(newTrack);
    }

    // Removed tracks are configured after removal so the callback can
    // unregister observers and notify the player.
    int index = 0;
    for (auto& track : removedTracks)
        configureTrack(*track, index++, TrackState::Remove);

    index = 0;
    for (auto& track : addedTracks)
        configureTrack(*track, index++, TrackState::Add);

    index = 0;
    for (const auto& track : trackMap.values())
        configureTrack(*track, index++, TrackState::Configure);
}
988
// Re-evaluates which video track is active and updates layer visibility.
// The work is deferred and coalesced: calls made while a check is already
// pending are no-ops, so repeated track churn results in a single pass.
void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            // Adopt the stream's active video track only when it has an entry
            // in m_videoTrackMap; show the video layer only when that entry
            // is also selected.
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        if (oldVideoTrack != m_activeVideoTrack) {
            // The cached frame image belongs to the previous track; drop it.
            // If nothing is being displayed yet, wait for the new track's
            // first frame before rendering.
            m_imagePainter.reset();
            if (m_displayMode == None)
                m_waitingForFirstImage = true;
        }
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_backgroundLayer.get().hidden = hideBackgroundLayer();

        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}
1023
// Rebuilds the audio and video track maps from the stream's current tracks,
// registering/unregistering this object as track observer and informing
// m_player of additions and removals.
void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    auto setAudioTrackState = [this](AudioTrackPrivateMediaStreamCocoa& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeAudioTrack(track);
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addAudioTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // An audio track is audible only when it is enabled and not muted.
            bool enabled = track.streamTrack().enabled() && !track.streamTrack().muted();
            track.setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, WTFMove(setAudioTrackState));

    auto setVideoTrackState = [this](VideoTrackPrivateMediaStream& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addVideoTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // Mark the track selected when it is the stream's active video
            // track, then re-evaluate which track should be displayed.
            bool selected = &track.streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track.setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, WTFMove(setVideoTrackState));
}
1070
// A live MediaStream exposes no seekable ranges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>();
}
1075
// A live MediaStream exposes no buffered ranges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    return std::make_unique<PlatformTimeRanges>();
}
1080
// MediaPlayerPrivateInterface paint entry point; delegates to the
// current-frame painter.
void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
1085
// Lazily converts the most recently enqueued sample into a CGImage for
// software painting. No-op when the image is already cached or no sample has
// arrived yet.
void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    if (m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    // Create the 32BGRA conformer on first use. std::make_unique either
    // succeeds or throws; it never yields null, so the post-construction
    // null re-check the previous code performed was unreachable and has been
    // removed.
    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)@{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}
1101
// Software-paints the current frame into the given context, honoring the
// sample's video transform. Paints solid black in PaintItBlack mode and
// nothing at all when no frame image is available.
void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    // Make sure m_imagePainter.cgImage reflects the latest enqueued sample.
    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

    if (!m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    // Concatenate the video transform into the CTM and map destRect through
    // its inverse so the transformed draw still lands in destRect. A
    // non-invertible transform falls back to identity.
    FloatRect transformedDestRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}
1126
// Creates or destroys the layer tree depending on whether the client can
// composite accelerated layers.
void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    bool canAccelerate = m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
    if (!canAccelerate) {
        destroyLayers();
        return;
    }
    ensureLayers();
}
1134
// Human-readable engine name reported to the media player client.
String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> engineName(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return engineName;
}
1140
// Updates the ready state and notifies the player. Any state beyond
// HaveNothing records that metadata has been observed at least once.
void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;

    m_readyState = readyState;
    characteristicsChanged();
    m_player->readyStateChanged();
}
1153
// Updates the network state and notifies the player when it changed.
void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (networkState == m_networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}
1162
// A live stream has nothing to pre-buffer; the only action is to release the
// queued video sample buffers when buffering is turned off.
void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (shouldBuffer)
        return;

    flushAndRemoveVideoSampleBuffers();
}
1168
// Runs the given task on the main thread. A weak pointer guards against this
// object being destroyed before the task fires.
void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = makeWeakPtr(*this), task = WTFMove(function)] {
        if (weakThis)
            task();
    });
}
1179
// Drops the cached frame image, its source sample, and the pixel-buffer
// conformer used to convert between the two.
void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    pixelBufferConformer = nullptr;
    mediaSample = nullptr;
    cgImage = nullptr;
}
1186
// Sizes the sample-buffer display layer to fill the background layer —
// swapping width and height when the video is rotated a quarter turn — and
// centers it within the background bounds.
void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_sampleBufferDisplayLayer || !m_backgroundLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;
    bool quarterTurn = m_videoRotation == MediaSample::VideoRotation::Left || m_videoRotation == MediaSample::VideoRotation::Right;
    if (quarterTurn)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2 };
}
1200
// The background layer was resized; recompute the display layer geometry
// inside runWithoutAnimations so the change takes effect immediately.
void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    runWithoutAnimations([this] { updateDisplayLayer(); });
}
1207
1208 #if !RELEASE_LOG_DISABLED
// This player logs to the shared Media channel.
WTFLogChannel& MediaPlayerPrivateMediaStreamAVFObjC::logChannel() const
{
    return LogMedia;
}
1213 #endif
1214
1215 }
1216
1217 #endif