Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm
/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaPlayerPrivateMediaStreamAVFObjC.h"

#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)

#import "AudioTrackPrivateMediaStreamCocoa.h"
#import "GraphicsContextCG.h"
#import "Logging.h"
#import "MediaStreamPrivate.h"
#import "PixelBufferConformerCV.h"
#import "VideoFullscreenLayerManagerObjC.h"
#import "VideoTrackPrivateMediaStream.h"
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
#import <QuartzCore/CALayer.h>
#import <QuartzCore/CATransaction.h>
#import <objc/runtime.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/mac/AVFoundationSPI.h>
#import <pal/system/Clock.h>
#import <wtf/Function.h>
#import <wtf/MainThread.h>
#import <wtf/NeverDestroyed.h>


#pragma mark - Soft Linking

#import <pal/cf/CoreMediaSoftLink.h>
#import "CoreVideoSoftLink.h"

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)

SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)

#define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
#define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
#define AVLayerVideoGravityResize getAVLayerVideoGravityResize()

using namespace WebCore;

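// WebAVSampleBufferStatusChangeListener observes, via KVO, the display layer's
// "status" and "error" properties and the background layer's "bounds", and
// forwards each change to the owning MediaPlayerPrivateMediaStreamAVFObjC on
// the main thread.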
@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end

@implementation WebAVSampleBufferStatusChangeListener

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

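        // KVO notifications can arrive on a background thread; hop to the main
        // thread before calling back into the player.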
        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

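    // Ignore the "prior" notification that KVO can send before a value actually
    // changes; we only act on the final value.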
    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }
}
@end

namespace WebCore {
using namespace PAL;

#pragma mark -
#pragma mark MediaPlayerPrivateMediaStreamAVFObjC

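// Approximate rendering pipeline latency, in seconds, folded into each track's
// timeline offset (see calculateTimelineOffset()).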
static const double rendererLatency = 0.02;

MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(PAL::Clock::create())
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    INFO_LOG(LOGIDENTIFIER);
}

MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    INFO_LOG(LOGIDENTIFIER);

    [m_statusChangeListener invalidate];

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}

#pragma mark -
#pragma mark MediaPlayer Factory Methods

void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}

void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}

MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    return parameters.isMediaStream ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
}

#pragma mark -
#pragma mark AVSampleBuffer Methods

void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

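    // If the head sample has no usable decode time, we can't compare against the
    // stream clock, so just cap the queue length instead.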
    auto decodeTime = queue.first()->decodeTime();
    if (!decodeTime.isValid() || decodeTime < MediaTime::zeroTime()) {
        while (queue.size() > 5)
            queue.removeFirst();

        return;
    }

    MediaTime now = streamTime();
    while (!queue.isEmpty()) {
        if (queue.first()->decodeTime() > now)
            break;
        queue.removeFirst();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}

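// Maps a sample's presentation time onto the player's stream clock, padding by
// the given latency and matching the sample's timescale.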
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = PAL::toMediaTime(CMTimeConvertScale(PAL::toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}

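// Builds (and caches) the affine transform that rotates and, if necessary,
// mirrors decoded frames; MediaSample::VideoRotation values are in degrees.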
CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}

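// Runs layer mutations inside a CATransaction with actions disabled so they
// take effect immediately, without implicit animation.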
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setAnimationDuration:0];
    [CATransaction setDisableActions:YES];
    function();
    [CATransaction commit];
}

void MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample(MediaSample& sample)
{
    if (m_sampleBufferDisplayLayer) {
        if ([m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed)
            return;

        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        m_player->firstVideoFrameAvailable();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    ASSERT(m_videoTrackMap.contains(track.id()));

    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    if (m_displayMode != LivePreview || (m_displayMode == PausedImage && m_imagePainter.mediaSample))
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);

        INFO_LOG(LOGIDENTIFIER, "timeline offset for track ", track.id(), " set to ", timelineOffset);
    }

    DEBUG_LOG(LOGIDENTIFIER, "original sample = ", toString(sample));
    sample.offsetTimestampsBy(timelineOffset);
    DEBUG_LOG(LOGIDENTIFIER, "updated sample = ", toString(sample));

    if (WILL_LOG(WTFLogLevelDebug)) {
        MediaTime now = streamTime();
        double delta = (sample.presentationTime() - now).toDouble();
        if (delta < 0)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", -delta, " seconds late");
        else if (delta < .01)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is only ", delta, " seconds early");
        else if (delta > .3)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", delta, " seconds early!");
    }

    enqueueCorrectedVideoSample(sample);
}

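// Asks the display layer to call back on the main queue once it can accept
// more data, then drains the pending sample queue.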
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    auto weakThis = makeWeakPtr(*this);
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        if (!weakThis)
            return;

        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}

AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    UNUSED_PARAM(layer);
    ERROR_LOG(LOGIDENTIFIER, "error = ", [[layer.error localizedDescription] UTF8String]);
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    ALWAYS_LOG(LOGIDENTIFIER, "status = ", (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}

void MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive()
{
    if (m_sampleBufferDisplayLayer && [m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed) {
        flushRenderers();
        if (m_imagePainter.mediaSample)
            enqueueCorrectedVideoSample(*m_imagePainter.mediaSample);
        updateDisplayMode();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (m_sampleBufferDisplayLayer)
        [m_sampleBufferDisplayLayer flush];
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}

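// Creates the AVSampleBufferDisplayLayer and the opaque background layer that
// hosts it; the background layer is what the fullscreen manager holds and what
// the status-change listener watches for bounds changes.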
void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        ERROR_LOG(LOGIDENTIFIER, "+[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
}

void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();

    m_videoFullscreenLayerManager->didDestroyVideoLayer();
}

#pragma mark -
#pragma mark MediaPlayerPrivateInterface Overrides

void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}

#if ENABLE(MEDIA_SOURCE)
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif

void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    INFO_LOG(LOGIDENTIFIER);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    // We are only doing a check on the active video track since the sole consumer of this API is canvas.
    // FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
    const auto* track = m_mediaStreamPrivate->activeVideoTrack();
    return !track || !track->isIsolated();
}

void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    if (playing())
        pause();
}

void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    INFO_LOG(LOGIDENTIFIER);
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

    return m_videoFullscreenLayerManager->videoInlineLayer();
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}

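// Decides what should be on screen: nothing, solid black, the last captured
// frame, or live video, based on track state and playback state.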
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (playing() && !m_ended) {
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}

bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;

    INFO_LOG(LOGIDENTIFIER, "updated to ", static_cast<int>(displayMode));
    m_displayMode = displayMode;

    if (m_sampleBufferDisplayLayer) {
        runWithoutAnimations([this] {
            m_sampleBufferDisplayLayer.get().hidden = m_displayMode < PausedImage;
        });
    }

    return true;
}

void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    m_shouldDisplayFirstVideoFrame = true;
    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    if (m_volume == volume)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);
    m_volume = volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);
    m_muted = muted;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasVideo();
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasAudio();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    m_visible = visible;
    if (m_visible)
        flushRenderers();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    if (paused())
        return m_pausedTime;

    return streamTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}

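// Derives the ready state from the liveness of the stream's tracks and from
// whether the active video track has delivered its first frame.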
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (!allTracksAreLive && !m_haveSeenMetadata)
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState) {
        ALWAYS_LOG(LOGIDENTIFIER, "updated to ", (int)newReadyState);
        setReadyState(newReadyState);
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged)
            m_player->sizeChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
        // Do nothing.
        break;
    case RealtimeMediaSource::Type::Audio:
        break;
    case RealtimeMediaSource::Type::Video:
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] {
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS_FAMILY)
    for (const auto& track : m_videoTrackMap.values()) {
        if (track->streamTrack().isCaptureTrack())
            return false;
    }
#endif

    return true;
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateCurrentFrameImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_imagePainter.cgImage);
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}

typedef enum {
    Add,
    Remove,
    Configure
} TrackState;

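// Diffs the current set of stream tracks against the map of tracks already
// exposed to the player, then runs configureTrack for each removal, addition,
// and (re)configuration, in that order.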
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(typename RefT::ValueType&, int, TrackState)>& configureTrack)
{
    Vector<RefT> removedTracks;
    Vector<RefT> addedTracks;
    Vector<RefPtr<MediaStreamTrackPrivate>> addedPrivateTracks;

    for (const auto& track : currentTracks) {
        if (track->type() != trackType)
            continue;

        if (!trackMap.contains(track->id()))
            addedPrivateTracks.append(track);
    }

    for (const auto& track : trackMap.values()) {
        auto& streamTrack = track->streamTrack();
        if (currentTracks.contains(&streamTrack))
            continue;

        removedTracks.append(track);
    }
    for (auto& track : removedTracks)
        trackMap.remove(track->streamTrack().id());

    for (auto& track : addedPrivateTracks) {
        RefT newTrack = itemFactory(*track.get());
        trackMap.add(track->id(), newTrack);
        addedTracks.append(newTrack);
    }

    int index = 0;
    for (auto& track : removedTracks)
        configureTrack(*track, index++, TrackState::Remove);

    index = 0;
    for (auto& track : addedTracks)
        configureTrack(*track, index++, TrackState::Add);

    index = 0;
    for (const auto& track : trackMap.values())
        configureTrack(*track, index++, TrackState::Configure);
}

void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        if (oldVideoTrack != m_activeVideoTrack)
            m_imagePainter.reset();
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    auto setAudioTrackState = [this](AudioTrackPrivateMediaStreamCocoa& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeAudioTrack(track);
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addAudioTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            bool enabled = track.streamTrack().enabled() && !track.streamTrack().muted();
            track.setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, WTFMove(setAudioTrackState));

    auto setVideoTrackState = [this](VideoTrackPrivateMediaStream& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addVideoTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            bool selected = &track.streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track.setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, WTFMove(setVideoTrackState));
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    return std::make_unique<PlatformTimeRanges>();
}

void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}

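// Lazily converts the most recent video sample's pixel buffer into a CGImage
// (conforming it to 32BGRA) for software painting.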
void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    if (m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)@{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    ASSERT(m_imagePainter.pixelBufferConformer);
    if (!m_imagePainter.pixelBufferConformer)
        return;

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}

void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack || !m_imagePainter.cgImage || !m_imagePainter.mediaSample) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

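    // Map the destination rect through the inverse of the video transform so
    // the frame lands in destRect once the transform is applied to the CTM.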
    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    FloatRect transformedDestRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}

void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player))
        ensureLayers();
    else
        destroyLayers();
}

String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return description;
}

void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;
    m_readyState = readyState;
    characteristicsChanged();

    m_player->readyStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (!shouldBuffer)
        flushAndRemoveVideoSampleBuffers();
}

void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = makeWeakPtr(*this), function = WTFMove(function)] {
        if (!weakThis)
            return;

        function();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    cgImage = nullptr;
    mediaSample = nullptr;
    pixelBufferConformer = nullptr;
}

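// Sizes and centers the display layer within the background layer, swapping
// width and height when the video is rotated 90 or 270 degrees.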
void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_backgroundLayer || !m_sampleBufferDisplayLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;
    if (m_videoRotation == MediaSample::VideoRotation::Right || m_videoRotation == MediaSample::VideoRotation::Left)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2 };
}

void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    scheduleDeferredTask([this] {
        runWithoutAnimations([this] {
            updateDisplayLayer();
        });
    });
}

#if !RELEASE_LOG_DISABLED
WTFLogChannel& MediaPlayerPrivateMediaStreamAVFObjC::logChannel() const
{
    return LogMedia;
}
#endif

}

#endif