Unreviewed, rolling out r244627.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaStreamAVFObjC.mm
1 /*
2  * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaStreamAVFObjC.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
#import "AudioTrackPrivateMediaStreamCocoa.h"
#import "GraphicsContextCG.h"
#import "Logging.h"
#import "MediaStreamPrivate.h"
#import "PixelBufferConformerCV.h"
#import "VideoFullscreenLayerManagerObjC.h"
#import "VideoTrackPrivateMediaStream.h"
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
#import <QuartzCore/CALayer.h>
#import <QuartzCore/CATransaction.h>
#import <objc/runtime.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/mac/AVFoundationSPI.h>
#import <pal/system/Clock.h>
#import <wtf/Function.h>
#import <wtf/MainThread.h>
#import <wtf/NeverDestroyed.h>
48
49
50 #pragma mark - Soft Linking
51
52 #import <pal/cf/CoreMediaSoftLink.h>
53 #import "CoreVideoSoftLink.h"
54
55 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
56
57 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
58
59 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
60 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
61 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
62
63 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
64 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
65 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
66
67 using namespace WebCore;
68
// Observes KVO notifications on the player's layers ("status"/"error" on the
// AVSampleBufferDisplayLayer, "bounds" on the background CALayer) and forwards
// them to the owning player. _parent is a raw back-pointer, so the player must
// call -invalidate before it is destroyed.
@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)callback;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end
78
@implementation WebAVSampleBufferStatusChangeListener

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

- (void)dealloc
{
    [self invalidate];
    // This file is compiled without ARC, so super must be messaged explicitly.
    [super dealloc];
}

// Drops the raw back-pointer to the player. Must run before the player dies.
- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

// Starts KVO. The player must have created both layers before calling this.
- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    // A layer may already have been torn down; only remove observers that
    // could actually have been registered.
    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

// KVO callback. May arrive off the main thread, so every notification to the
// player is bounced to the main thread; the RetainPtr captures keep |self|
// (and its _parent check) alive until each dispatched block runs.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                // _parent may have been invalidated before this block ran.
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

    // Ignore "prior" notifications; only the post-change value matters here.
    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            if (!_parent)
                return;

            // Bounds changes are delivered synchronously when already on the
            // main thread, asynchronously otherwise.
            if (isMainThread()) {
                _parent->backgroundLayerBoundsChanged();
                return;
            }

            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }

}
@end
187
188 namespace WebCore {
189 using namespace PAL;
190
191 #pragma mark -
192 #pragma mark MediaPlayerPrivateMediaStreamAVFObjC
193
// Render headroom (seconds) added when mapping a sample onto the renderer timeline.
static const double rendererLatency = 0.02;

// The player owns its KVO listener, the wall clock used as the playback
// timeline, and the fullscreen layer manager.
MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(PAL::Clock::create())
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    INFO_LOG(LOGIDENTIFIER);
}
208
MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    INFO_LOG(LOGIDENTIFIER);

    // Detach the KVO listener first so layer teardown below cannot call back
    // into a partially-destroyed player.
    [m_statusChangeListener invalidate];

    // Stop audio rendering before the track maps are cleared.
    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}
230
231 #pragma mark -
232 #pragma mark MediaPlayer Factory Methods
233
// Registers this engine with the MediaPlayer factory, but only when the
// soft-linked AVFoundation/CoreMedia pieces are actually available.
void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}

// Each call below may trigger a framework load via soft-linking; the
// short-circuit order is intentional.
bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}

// MediaStream playback is not keyed off MIME types, so no types are reported.
void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}
251
// This engine handles MediaStream sources exclusively; everything else is
// rejected so another engine can claim it.
MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    if (parameters.isMediaStream)
        return MediaPlayer::IsSupported;
    return MediaPlayer::IsNotSupported;
}
256
257 #pragma mark -
258 #pragma mark AVSampleBuffer Methods
259
// Prunes queued samples whose decode time has already passed so the pending
// queue cannot grow without bound while the display layer is not consuming.
void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

    auto firstDecodeTime = queue.first()->decodeTime();
    bool timestampsAreUsable = firstDecodeTime.isValid() && firstDecodeTime >= MediaTime::zeroTime();
    if (!timestampsAreUsable) {
        // Without trustworthy timestamps, fall back to a fixed cap of five samples.
        while (queue.size() > 5)
            queue.removeFirst();
        return;
    }

    auto now = streamTime();
    while (!queue.isEmpty() && queue.first()->decodeTime() <= now)
        queue.removeFirst();
}
280
// Appends a sample after pruning entries whose decode time has already passed.
void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}

// Computes the offset that maps a sample's presentation time onto the
// player's stream clock, padded with |latency| seconds of render headroom.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    // Rescale so later timestamp arithmetic does not mix CMTime timescales.
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = PAL::toMediaTime(CMTimeConvertScale(PAL::toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}
297
// Returns the affine transform that rotates/mirrors decoded frames for
// display. The result is cached; callers pass forceUpdate = true after
// changing m_videoRotation / m_videoMirrored.
CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // A degenerate frame yields the identity transform (and is not cached).
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    // The rotation enum's raw value is treated as an angle in degrees here.
    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}

// Runs |function| inside a CATransaction with implicit animations disabled so
// layer property changes take effect immediately.
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setAnimationDuration:0];
    [CATransaction setDisableActions:YES];
    function();
    [CATransaction commit];
}
329
// Pushes a timeline-corrected sample into the display layer, queueing it when
// the layer cannot currently accept more data.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample(MediaSample& sample)
{
    if (m_sampleBufferDisplayLayer) {
        // A failed layer rejects samples; give up until it is rebuilt.
        if ([m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed)
            return;

        // Refresh the display transform first if orientation/mirroring changed.
        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    // Note: this fires even when the sample was only queued (or there is no
    // display layer at all) — any arriving frame counts as the "first" one.
    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        m_player->firstVideoFrameAvailable();
    }
}
359
// Applies the per-track timeline offset to |sample| and hands it to the
// display layer. Only samples from the stream's active video track are used.
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    // Remember the most recent sample so the paused image can be painted.
    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    if (m_displayMode != LivePreview && !m_waitingForFirstImage)
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    // The track map can briefly be out of sync with the active track while
    // tracks are updated; drop the sample rather than dereference null.
    if (!videoTrack)
        return;

    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);

        INFO_LOG(LOGIDENTIFIER, "timeline offset for track ", track.id(), " set to ", timelineOffset);
    }

    DEBUG_LOG(LOGIDENTIFIER, "original sample = ", sample);
    sample.offsetTimestampsBy(timelineOffset);
    DEBUG_LOG(LOGIDENTIFIER, "updated sample = ", sample);

    if (WILL_LOG(WTFLogLevel::Debug)) {
        MediaTime now = streamTime();
        double delta = (sample.presentationTime() - now).toDouble();
        if (delta < 0)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", -delta, " seconds late");
        else if (delta < .01)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is only ", delta, " seconds early");
        else if (delta > .3)
            DEBUG_LOG(LOGIDENTIFIER, "*NOTE* video sample at time ", now, " is ", delta, " seconds early!");
    }

    enqueueCorrectedVideoSample(sample);
    if (m_waitingForFirstImage) {
        m_waitingForFirstImage = false;
        updateDisplayMode();
    }
}
405
// Asks the display layer for a main-queue callback once it can accept more
// data, then drains the pending sample queue.
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    auto weakThis = makeWeakPtr(*this);
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        // The block reaches members through an implicit |this| capture;
        // |weakThis| guards against the player being destroyed first.
        if (!weakThis)
            return;

        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        // No active track means the queued samples will never be shown.
        if (!m_activeVideoTrack) {
            m_pendingVideoSampleQueue.clear();
            return;
        }

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            // The layer filled up again; re-arm the callback and stop draining.
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}

AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}
437
// Called on the main thread when KVO reports a display-layer error.
void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    // UNUSED_PARAM keeps builds warning-free when ERROR_LOG compiles away.
    UNUSED_PARAM(layer);
    ERROR_LOG(LOGIDENTIFIER, "error = ", [[layer.error localizedDescription] UTF8String]);
}

// Called on the main thread when the display layer's rendering status changes.
void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    ALWAYS_LOG(LOGIDENTIFIER, "status = ", (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    // Ignore stale notifications from a layer we no longer own.
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    // Once rendering starts, force the timeline offset to be recomputed for
    // the active track's next sample.
    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}
457
// The display layer may enter the failed state while the application is in
// the background; rebuild its contents when the app becomes active again.
void MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive()
{
    if (m_sampleBufferDisplayLayer && [m_sampleBufferDisplayLayer status] == AVQueuedSampleBufferRenderingStatusFailed) {
        flushRenderers();
        if (m_imagePainter.mediaSample)
            enqueueCorrectedVideoSample(*m_imagePainter.mediaSample);
        updateDisplayMode();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (m_sampleBufferDisplayLayer)
        [m_sampleBufferDisplayLayer flush];
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    // Messaging nil is a no-op, so no null check is needed here.
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}
478
// Lazily creates the AVSampleBufferDisplayLayer and its background parent
// layer once there is an enabled active video track. Safe to call repeatedly.
void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        ERROR_LOG(LOGIDENTIFIER, "+[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().hidden = hideBackgroundLayer();

    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    // Size the background layer to the element's content box; the display
    // layer is positioned within it by updateDisplayLayer().
    auto size = snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size();
    m_backgroundLayer.get().bounds = CGRectMake(0, 0, size.width(), size.height());

    // Begin observing only after both layers exist — beginObservingLayers
    // asserts that they do.
    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), size);
}
521
// Tears down both layers. Safe to call when no layers exist.
void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    // Stop observing before mutating the layers so no KVO fires mid-teardown.
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();

    m_videoFullscreenLayerManager->didDestroyVideoLayer();
}
537
538 #pragma mark -
539 #pragma mark MediaPlayerPrivateInterface Overrides
540
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}

#if ENABLE(MEDIA_SOURCE)
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif

// Attaches the player to |stream| and begins observing it. Track creation and
// the initial ready-state update happen in a deferred task.
void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    INFO_LOG(LOGIDENTIFIER);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}
575
bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    // We are only doing a check on the active video track since the sole consumer of this API is canvas.
    // FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
    const auto* track = m_mediaStreamPrivate->activeVideoTrack();
    return !track || !track->isIsolated();
}

void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    if (playing())
        pause();
}

void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    // Nothing to prepare: a MediaStream renders live data on demand.
    INFO_LOG(LOGIDENTIFIER);
}
595
// The inline layer is only exposed while there is something to display.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

    return m_videoFullscreenLayerManager->videoInlineLayer();
}

// Accessors used by WebAVSampleBufferStatusChangeListener for (un)observing.
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}
613
// Derives what should be shown from player and stream state:
//   None                - no layer/metadata/size yet
//   PaintItBlack        - active track disabled/muted/ended, or playback
//                         never started or already ended
//   WaitingForFirstImage / PausedImage / LivePreview otherwise.
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (m_waitingForFirstImage)
        return WaitingForFirstImage;

    if (playing() && !m_ended) {
        // Playing but the stream stopped producing frames: keep the last image.
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}
638
// Recomputes the display mode and toggles layer visibility to match.
// Returns true when the mode actually changed.
bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;

    INFO_LOG(LOGIDENTIFIER, "updated to ", static_cast<int>(displayMode));
    m_displayMode = displayMode;

    // Modes below PausedImage (None, PaintItBlack, WaitingForFirstImage) hide
    // the video layer; changes are applied without implicit animations.
    auto hidden = m_displayMode < PausedImage;
    if (m_sampleBufferDisplayLayer && m_sampleBufferDisplayLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_sampleBufferDisplayLayer.get().hidden = hidden;
        });
    }
    hidden = hideBackgroundLayer();
    if (m_backgroundLayer && m_backgroundLayer.get().hidden != hidden) {
        runWithoutAnimations([this, hidden] {
            m_backgroundLayer.get().hidden = hidden;
        });
    }

    return true;
}
664
void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    // The stream clock is started once and keeps running across pause().
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    // Freeze currentMediaTime() at the moment of pausing.
    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}
709
void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    if (m_volume == volume)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);
    m_volume = volume;
    // Muting is implemented as zero volume on each audio track renderer.
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);
    m_muted = muted;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}
731
// True when stream metadata is available and the stream carries video.
// Short-circuiting preserves the original null guard on m_mediaStreamPrivate.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasVideo();
}

// True when stream metadata is available and the stream carries audio.
bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    return metaDataAvailable() && m_mediaStreamPrivate->hasAudio();
}
747
void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    m_visible = visible;
    // Repaint with fresh data when becoming visible again.
    if (m_visible)
        flushRenderers();
}

// A live stream has no fixed duration.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    // While paused, report the time captured when pause() was called.
    if (paused())
        return m_pausedTime;

    return streamTime();
}

// The playback timeline: seconds elapsed on the player's clock.
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}
785
// Derives the ready state from stream/track state:
//   no active stream or no tracks            -> HaveNothing
//   active video track with no frame yet     -> HaveNothing (until metadata seen)
//   waiting for first frame / tracks not live -> HaveMetadata
//   otherwise                                 -> HaveEnoughData
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        // The active video track additionally needs a decoded frame before
        // the player can claim to have data.
        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata || m_waitingForFirstImage)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (m_waitingForFirstImage || (!allTracksAreLive && !m_haveSeenMetadata))
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}

// Pushes a recomputed ready state to the MediaPlayer only when it changed.
void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState) {
        ALWAYS_LOG(LOGIDENTIFIER, "updated to ", (int)newReadyState);
        setReadyState(newReadyState);
    }
}
818
// MediaStreamPrivate observer: the stream's active state flipped.
void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}

// Invalidates the cached video transform and notifies the client, but only
// when the display mode actually changed.
void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });

}
851
// MediaStreamPrivate observer: track configuration or intrinsic size changed.
void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
        // Seeing a size for the first time moves playback out of the None state.
        if (m_playbackState == PlaybackState::None)
            m_playbackState = PlaybackState::Paused;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged) {
            m_player->sizeChanged();
        }
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}
889
void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    // Drop samples delivered before the stream clock has started.
    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
    case RealtimeMediaSource::Type::Audio:
        // Nothing to do here; audio samples are not rendered by this path.
        break;
    case RealtimeMediaSource::Type::Video:
        // Only the active video track feeds the display layer.
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}
911
// MediaStreamTrackPrivate::Observer: recompute the player ready state on the
// main thread when any track's ready state changes.
void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] {
        updateReadyState();
    });
}
918
bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS_FAMILY)
    // On iOS-family platforms, disallow PiP whenever any video track is backed
    // by a local capture device.
    for (const auto& videoTrack : m_videoTrackMap.values()) {
        if (videoTrack->streamTrack().isCaptureTrack())
            return false;
    }
#endif

    return true;
}
930
// Hands the fullscreen layer to the layer manager. The current frame image is
// refreshed first so the manager can show it during the transition.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateCurrentFrameImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_imagePainter.cgImage);
}
936
// Forwards the fullscreen frame rect to the layer manager.
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
941
// Phase of a track-map reconciliation pass; see updateTracksOfType().
enum TrackState {
    Add,
    Remove,
    Configure
};
947
// Reconciles trackMap (the player's per-type track wrappers) against the
// stream's current track list: wrappers whose underlying track disappeared are
// dropped, new tracks get wrappers from itemFactory, and configureTrack is then
// invoked for every removal, every addition, and finally every surviving entry.
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(typename RefT::ValueType&, int, TrackState)>& configureTrack)
{
    Vector<RefT> outgoing;
    Vector<RefT> incoming;
    Vector<RefPtr<MediaStreamTrackPrivate>> newPrivateTracks;

    // Stream tracks of the requested type that do not yet have a wrapper.
    for (const auto& streamTrack : currentTracks) {
        if (streamTrack->type() != trackType)
            continue;

        if (!trackMap.contains(streamTrack->id()))
            newPrivateTracks.append(streamTrack);
    }

    // Wrappers whose underlying track is no longer part of the stream.
    for (const auto& wrapper : trackMap.values()) {
        if (currentTracks.contains(&wrapper->streamTrack()))
            continue;

        outgoing.append(wrapper);
    }
    for (auto& wrapper : outgoing)
        trackMap.remove(wrapper->streamTrack().id());

    for (auto& privateTrack : newPrivateTracks) {
        RefT wrapper = itemFactory(*privateTrack.get());
        trackMap.add(privateTrack->id(), wrapper);
        incoming.append(wrapper);
    }

    int index = 0;
    for (auto& wrapper : outgoing)
        configureTrack(*wrapper, index++, TrackState::Remove);

    index = 0;
    for (auto& wrapper : incoming)
        configureTrack(*wrapper, index++, TrackState::Add);

    index = 0;
    for (const auto& wrapper : trackMap.values())
        configureTrack(*wrapper, index++, TrackState::Configure);
}
991
// Re-evaluates which video track is active and updates layer visibility.
// Requests are coalesced via m_pendingSelectedTrackCheck and the work runs as
// a deferred main-thread task.
void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            // Adopt the stream's active track only if we have a wrapper for
            // it; the video layer stays hidden unless that wrapper is selected.
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        if (oldVideoTrack != m_activeVideoTrack) {
            // The cached frame belongs to the previous track; discard it and
            // wait for the first image of the new track if nothing is shown.
            m_imagePainter.reset();
            if (m_displayMode == None)
                m_waitingForFirstImage = true;
        }
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_backgroundLayer.get().hidden = hideBackgroundLayer();

        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}
1026
// Synchronizes the player's audio and video track wrappers with the stream's
// current tracks, wiring this object up as an observer of each track and
// notifying the MediaPlayer client of additions/removals.
void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    auto setAudioTrackState = [this](AudioTrackPrivateMediaStreamCocoa& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeAudioTrack(track);
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addAudioTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // An audio track only renders when it is enabled and not muted.
            bool enabled = track.streamTrack().enabled() && !track.streamTrack().muted();
            track.setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, WTFMove(setAudioTrackState));

    auto setVideoTrackState = [this](VideoTrackPrivateMediaStream& track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track.streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track.streamTrack().addObserver(*this);
            m_player->addVideoTrack(track);
            break;
        case TrackState::Configure:
            track.setTrackIndex(index);
            // The selected video track is the stream's active video track.
            bool selected = &track.streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track.setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, WTFMove(setVideoTrackState));
}
1073
// A media stream is live: nothing is seekable, so return an empty range set.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    auto emptyRanges = std::make_unique<PlatformTimeRanges>();
    return emptyRanges;
}
1078
// A media stream has no buffered data concept, so return an empty range set.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    auto emptyRanges = std::make_unique<PlatformTimeRanges>();
    return emptyRanges;
}
1083
// MediaPlayerPrivateInterface entry point: delegate to the current-frame
// painter.
void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
1088
// Lazily converts the most recent media sample into a CGImage for software
// painting. Does nothing if the image is already cached or no sample exists.
void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    if (m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    // The conformer converts whatever pixel format the sample uses to 32BGRA,
    // which CG can draw directly.
    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)@{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    ASSERT(m_imagePainter.pixelBufferConformer);
    if (!m_imagePainter.pixelBufferConformer)
        return;

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}
1104
// Software-paints the current frame into destRect, honoring the display mode:
// nothing when hidden, solid black for PaintItBlack, otherwise the cached
// CGImage with the sample's rotation/mirroring applied.
void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

    if (!m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    // Apply the video transform to the context, so the destination rect must
    // be mapped through the inverse transform to land in the right place.
    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    FloatRect transformedDestRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}
1129
void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    // Tear the layers down when compositing becomes unavailable; recreate them
    // when it becomes available again.
    if (!m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player)) {
        destroyLayers();
        return;
    }

    ensureLayers();
}
1137
// Human-readable engine name reported to the MediaPlayer machinery.
String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return description;
}
1143
// Updates the ready state, records that metadata has been seen once the state
// passes HaveNothing, and notifies the client.
void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;
    m_readyState = readyState;
    characteristicsChanged();

    // m_player may have been cleared while this object is still alive; the
    // deferred tasks in this file all guard on it, so do the same here.
    if (m_player)
        m_player->readyStateChanged();
}
1156
// Updates the network state and notifies the client when it actually changes.
void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    // Guard m_player for consistency with the rest of this file, which treats
    // it as clearable (see the deferred-task checks above).
    if (m_player)
        m_player->networkStateChanged();
}
1165
// When buffering is turned off, drop any queued video sample buffers; nothing
// needs to happen when it is turned back on.
void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (!shouldBuffer)
        flushAndRemoveVideoSampleBuffers();
}
1171
// Runs the given task on the main thread, dropping it if this object has been
// destroyed in the meantime (weak-pointer guard).
void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = makeWeakPtr(*this), function = WTFMove(function)] {
        if (weakThis)
            function();
    });
}
1182
// Drops the cached frame image, its source sample, and the pixel-buffer
// conformer so the next paint starts from a clean slate.
void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    cgImage = nullptr;
    mediaSample = nullptr;
    pixelBufferConformer = nullptr;
}
1189
// Fits the sample-buffer display layer inside the background layer, swapping
// width and height when the video is rotated a quarter turn, and centers it.
void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_backgroundLayer || !m_sampleBufferDisplayLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;
    if (m_videoRotation == MediaSample::VideoRotation::Right || m_videoRotation == MediaSample::VideoRotation::Left)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2};
}
1203
// Re-lays out the display layer when the background layer resizes, with
// implicit CA animations suppressed so the change is not animated.
void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    runWithoutAnimations([this] {
        updateDisplayLayer();
    });
}
1210
#if !RELEASE_LOG_DISABLED
// Logging channel used by this player's LoggerHelper plumbing.
WTFLogChannel& MediaPlayerPrivateMediaStreamAVFObjC::logChannel() const
{
    return LogMedia;
}
#endif
1217
1218 }
1219
1220 #endif