Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm
/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaPlayerPrivateMediaStreamAVFObjC.h"

#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)

#import "AudioTrackPrivateMediaStreamCocoa.h"
#import "Clock.h"
#import "GraphicsContextCG.h"
#import "Logging.h"
#import "MediaStreamPrivate.h"
#import "PixelBufferConformerCV.h"
#import "VideoTrackPrivateMediaStream.h"
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
#import <QuartzCore/CALayer.h>
#import <QuartzCore/CATransaction.h>
#import <objc/runtime.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/mac/AVFoundationSPI.h>
#import <wtf/Function.h>
#import <wtf/MainThread.h>
#import <wtf/NeverDestroyed.h>

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
#import "VideoFullscreenLayerManager.h"
#endif

#pragma mark - Soft Linking

#import "CoreMediaSoftLink.h"
#import "CoreVideoSoftLink.h"

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)

SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)

#define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
#define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
#define AVLayerVideoGravityResize getAVLayerVideoGravityResize()

using namespace WebCore;

@interface WebAVSampleBufferStatusChangeListener : NSObject {
    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
}

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent;
- (void)invalidate;
- (void)beginObservingLayers;
- (void)stopObservingLayers;
@end

@implementation WebAVSampleBufferStatusChangeListener

- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
{
    if (!(self = [super init]))
        return nil;

    _parent = parent;

    return self;
}

- (void)dealloc
{
    [self invalidate];
    [super dealloc];
}

- (void)invalidate
{
    [self stopObservingLayers];
    _parent = nullptr;
}

- (void)beginObservingLayers
{
    ASSERT(_parent);
    ASSERT(_parent->displayLayer());
    ASSERT(_parent->backgroundLayer());

    [_parent->displayLayer() addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->displayLayer() addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nil];
    [_parent->backgroundLayer() addObserver:self forKeyPath:@"bounds" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)stopObservingLayers
{
    if (!_parent)
        return;

    if (_parent->displayLayer()) {
        [_parent->displayLayer() removeObserver:self forKeyPath:@"status"];
        [_parent->displayLayer() removeObserver:self forKeyPath:@"error"];
    }
    if (_parent->backgroundLayer())
        [_parent->backgroundLayer() removeObserver:self forKeyPath:@"bounds"];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);
    UNUSED_PARAM(keyPath);
    ASSERT(_parent);

    if (!_parent)
        return;

    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
        ASSERT(layer.get() == _parent->displayLayer());

        if ([keyPath isEqualToString:@"status"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerStatusDidChange(layer.get());
            });
            return;
        }

        if ([keyPath isEqualToString:@"error"]) {
            RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self), layer = WTFMove(layer), status = WTFMove(status)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->layerErrorDidChange(layer.get());
            });
            return;
        }
    }

    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

    if ((CALayer *)object == _parent->backgroundLayer()) {
        if ([keyPath isEqualToString:@"bounds"]) {
            callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                if (!protectedSelf->_parent)
                    return;

                protectedSelf->_parent->backgroundLayerBoundsChanged();
            });
        }
    }
}
@end

namespace WebCore {

#pragma mark -
#pragma mark MediaPlayerPrivateMediaStreamAVFObjC

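// Fixed latency (in seconds) folded into each track's timeline offset,
// presumably to give the display layer time to enqueue a sample before its
// adjusted presentation time arrives.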
static const double rendererLatency = 0.02;

MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
    , m_clock(Clock::create())
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
#endif
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);
}

MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);

    [m_statusChangeListener invalidate];

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    if (m_mediaStreamPrivate) {
        m_mediaStreamPrivate->removeObserver(*this);

        for (auto& track : m_mediaStreamPrivate->tracks())
            track->removeObserver(*this);
    }

    destroyLayers();

    m_audioTrackMap.clear();
    m_videoTrackMap.clear();
}

#pragma mark -
#pragma mark MediaPlayer Factory Methods

void MediaPlayerPrivateMediaStreamAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaStreamAVFObjC>(player); }, getSupportedTypes,
            supportsType, 0, 0, 0, 0);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}

void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // FIXME: Is it really correct to list no supported types?
    types.clear();
}

MediaPlayer::SupportsType MediaPlayerPrivateMediaStreamAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    return parameters.isMediaStream ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
}

#pragma mark -
#pragma mark AVSampleBuffer Methods

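// Drop queued samples whose decode time has already passed on the stream
// clock. If the queue's timestamps look bogus (invalid or negative decode
// times), fall back to capping the queue at a small fixed depth instead.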
void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue& queue)
{
    if (queue.isEmpty())
        return;

    auto decodeTime = queue.first()->decodeTime();
    if (!decodeTime.isValid() || decodeTime < MediaTime::zeroTime()) {
        while (queue.size() > 5)
            queue.removeFirst();

        return;
    }

    MediaTime now = streamTime();
    while (!queue.isEmpty()) {
        if (queue.first()->decodeTime() > now)
            break;
        queue.removeFirst();
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue& queue, MediaSample& sample)
{
    removeOldSamplesFromPendingQueue(queue);
    queue.append(sample);
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateSampleTimes(MediaSample& sample, const MediaTime& timelineOffset, const char* loggingPrefix)
{
    LOG(MediaCaptureSamples, "%s(%p): original sample = %s", loggingPrefix, this, toString(sample).utf8().data());
    sample.offsetTimestampsBy(timelineOffset);
    LOG(MediaCaptureSamples, "%s(%p): adjusted sample = %s", loggingPrefix, this, toString(sample).utf8().data());

#if !LOG_DISABLED
    MediaTime now = streamTime();
    double delta = (sample.presentationTime() - now).toDouble();
    if (delta < 0)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is %f seconds late", loggingPrefix, this, toString(now).utf8().data(), -delta);
    else if (delta < .01)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is only %f seconds early", loggingPrefix, this, toString(now).utf8().data(), delta);
    else if (delta > .3)
        LOG(Media, "%s(%p): *NOTE* sample at time %s is %f seconds early!", loggingPrefix, this, toString(now).utf8().data(), delta);
#else
    UNUSED_PARAM(loggingPrefix);
#endif
}

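// Compute the offset that maps a sample's presentation timeline onto the
// player's stream clock, including the fixed renderer latency. The result is
// rescaled to the sample's timescale so later timestamp arithmetic stays exact.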
MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample& sample, double latency)
{
    MediaTime sampleTime = sample.outputPresentationTime();
    if (!sampleTime || !sampleTime.isValid())
        sampleTime = sample.presentationTime();
    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
    if (timelineOffset.timeScale() != sampleTime.timeScale())
        timelineOffset = PAL::toMediaTime(CMTimeConvertScale(PAL::toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
    return timelineOffset;
}

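// Build (and cache) the affine transform for the current rotation/mirroring
// metadata. MediaSample::VideoRotation values encode degrees, so casting to
// int and multiplying by M_PI / 180 converts directly to radians.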
CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
{
    if (!forceUpdate && m_transformIsValid)
        return m_videoTransform;

    CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    if (!width || !height)
        return CGAffineTransformIdentity;

    ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
    ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);

    m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
    if (sample.videoMirrored())
        m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);

    m_transformIsValid = true;
    return m_videoTransform;
}

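// Run a block of layer mutations inside a CATransaction with implicit
// animations disabled, so geometry changes apply immediately.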
static void runWithoutAnimations(const WTF::Function<void()>& function)
{
    [CATransaction begin];
    [CATransaction setAnimationDuration:0];
    [CATransaction setDisableActions:YES];
    function();
    [CATransaction commit];
}

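// Keep the most recent sample of the active video track for software painting
// (unless we are showing a paused image), then push it to the display layer,
// which only happens while in live preview. Samples that arrive while the
// layer is backed up are parked on the pending queue.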
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
{
    ASSERT(m_videoTrackMap.contains(track.id()));

    if (&track != m_mediaStreamPrivate->activeVideoTrack())
        return;

    if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
        m_imagePainter.mediaSample = &sample;
        m_imagePainter.cgImage = nullptr;
        if (m_readyState < MediaPlayer::ReadyState::HaveEnoughData)
            updateReadyState();
    }

    if (m_displayMode != LivePreview || (m_displayMode == PausedImage && m_imagePainter.mediaSample))
        return;

    auto videoTrack = m_videoTrackMap.get(track.id());
    MediaTime timelineOffset = videoTrack->timelineOffset();
    if (timelineOffset == MediaTime::invalidTime()) {
        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
        videoTrack->setTimelineOffset(timelineOffset);
        LOG(MediaCaptureSamples, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample: timeline offset for track %s set to %s", track.id().utf8().data(), toString(timelineOffset).utf8().data());
    }

    updateSampleTimes(sample, timelineOffset, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample");

    if (m_sampleBufferDisplayLayer) {
        if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
            m_videoRotation = sample.videoRotation();
            m_videoMirrored = sample.videoMirrored();
            runWithoutAnimations([this, &sample] {
                m_sampleBufferDisplayLayer.get().affineTransform = videoTransformationMatrix(sample, true);
                updateDisplayLayer();
            });
        }

        if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
            requestNotificationWhenReadyForVideoData();
            return;
        }

        [m_sampleBufferDisplayLayer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
    }

    if (!m_hasEverEnqueuedVideoFrame) {
        m_hasEverEnqueuedVideoFrame = true;
        m_player->firstVideoFrameAvailable();
    }
}

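// Ask the display layer to call back on the main queue once it can accept
// more media data, then drain the pending queue, re-arming the callback if
// the layer backs up again mid-drain.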
void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
{
    auto weakThis = createWeakPtr();
    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
        if (!weakThis)
            return;

        [m_sampleBufferDisplayLayer stopRequestingMediaData];

        while (!m_pendingVideoSampleQueue.isEmpty()) {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                requestNotificationWhenReadyForVideoData();
                return;
            }

            auto sample = m_pendingVideoSampleQueue.takeFirst();
            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
        }
    }];
}

AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
{
    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
    return nullptr;
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(AVSampleBufferDisplayLayer* layer)
{
    UNUSED_PARAM(layer);
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::layerErrorDidChange(%p) - error = %s", this, [[layer.error localizedDescription] UTF8String]);
}

void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(%p) - status = %d", this, (int)layer.status);

    if (layer.status != AVQueuedSampleBufferRenderingStatusRendering)
        return;
    if (!m_sampleBufferDisplayLayer || !m_activeVideoTrack || layer != m_sampleBufferDisplayLayer)
        return;

    auto track = m_videoTrackMap.get(m_activeVideoTrack->id());
    if (track)
        track->setTimelineOffset(MediaTime::invalidTime());
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
{
    if (m_sampleBufferDisplayLayer)
        [m_sampleBufferDisplayLayer flush];
}

void MediaPlayerPrivateMediaStreamAVFObjC::flushAndRemoveVideoSampleBuffers()
{
    [m_sampleBufferDisplayLayer flushAndRemoveImage];
}

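// Lazily create the rendering layer tree: an AVSampleBufferDisplayLayer
// centered inside an opaque black background CALayer, with KVO registered
// for status, error, and bounds changes.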
void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
{
    if (m_sampleBufferDisplayLayer)
        return;

    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->activeVideoTrack() || !m_mediaStreamPrivate->activeVideoTrack()->enabled())
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
    if (!m_sampleBufferDisplayLayer) {
        LOG_ERROR("MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers: +[AVSampleBufferDisplayLayer alloc] failed.");
        return;
    }

    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
    m_sampleBufferDisplayLayer.get().needsDisplayOnBoundsChange = YES;
    m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

    m_backgroundLayer = adoptNS([[CALayer alloc] init]);
    m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
    m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;

    [m_statusChangeListener beginObservingLayers];

    [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];

#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
    [m_backgroundLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer parent"];
#endif

    updateRenderingMode();
    updateDisplayLayer();

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
#endif
}

void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
    [m_statusChangeListener stopObservingLayers];
    if (m_sampleBufferDisplayLayer) {
        m_pendingVideoSampleQueue.clear();
        [m_sampleBufferDisplayLayer stopRequestingMediaData];
        [m_sampleBufferDisplayLayer flush];
        m_sampleBufferDisplayLayer = nullptr;
    }
    m_backgroundLayer = nullptr;

    updateRenderingMode();

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif
}

#pragma mark -
#pragma mark MediaPlayerPrivateInterface Overrides

void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}

#if ENABLE(MEDIA_SOURCE)
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&, MediaSourcePrivateClient*)
{
    // This media engine only supports MediaStream URLs.
    scheduleDeferredTask([this] {
        setNetworkState(MediaPlayer::FormatError);
    });
}
#endif

void MediaPlayerPrivateMediaStreamAVFObjC::load(MediaStreamPrivate& stream)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::load(%p)", this);

    m_intrinsicSize = FloatSize();

    m_mediaStreamPrivate = &stream;
    m_mediaStreamPrivate->addObserver(*this);
    m_ended = !m_mediaStreamPrivate->active();

    scheduleDeferredTask([this] {
        updateTracks();
        setNetworkState(MediaPlayer::Idle);
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::didPassCORSAccessCheck() const
{
    // We are only doing a check on the active video track since the sole consumer of this API is canvas.
    // FIXME: We should change the name of didPassCORSAccessCheck if it is expected to stay like this.
    const auto* track = m_mediaStreamPrivate->activeVideoTrack();
    return !track || !track->isIsolated();
}

void MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::cancelLoad(%p)", this);
    if (playing())
        pause();
}

void MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::prepareToPlay(%p)", this);
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
    if (!m_backgroundLayer || m_displayMode == None)
        return nullptr;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_backgroundLayer.get();
#endif
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::displayLayer()
{
    return m_sampleBufferDisplayLayer.get();
}

PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayer()
{
    return m_backgroundLayer.get();
}

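// Decide what should be on screen. The DisplayMode values are ordered
// (None < PaintItBlack < PausedImage < LivePreview), which is what lets
// callers compare with "<" when deciding whether to hide the video layer.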
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
    if (m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
        return None;

    if (auto* track = m_mediaStreamPrivate->activeVideoTrack()) {
        if (!track->enabled() || track->muted() || track->ended())
            return PaintItBlack;
    }

    if (playing() && !m_ended) {
        if (!m_mediaStreamPrivate->isProducingData())
            return PausedImage;
        return LivePreview;
    }

    if (m_playbackState == PlaybackState::None || m_ended)
        return PaintItBlack;

    return PausedImage;
}

bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
{
    DisplayMode displayMode = currentDisplayMode();

    if (displayMode == m_displayMode)
        return false;
    m_displayMode = displayMode;

    if (m_sampleBufferDisplayLayer) {
        runWithoutAnimations([this] {
            m_sampleBufferDisplayLayer.get().hidden = m_displayMode < PausedImage;
        });
    }

    return true;
}

void MediaPlayerPrivateMediaStreamAVFObjC::play()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::play(%p)", this);

    if (!metaDataAvailable() || playing() || m_ended)
        return;

    m_playbackState = PlaybackState::Playing;
    if (!m_clock->isRunning())
        m_clock->start();

    for (const auto& track : m_audioTrackMap.values())
        track->play();

    m_shouldDisplayFirstVideoFrame = true;
    updateDisplayMode();

    scheduleDeferredTask([this] {
        updateReadyState();
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::pause()
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::pause(%p)", this);

    if (!metaDataAvailable() || !playing() || m_ended)
        return;

    m_pausedTime = currentMediaTime();
    m_playbackState = PlaybackState::Paused;

    for (const auto& track : m_audioTrackMap.values())
        track->pause();

    updateDisplayMode();
    flushRenderers();

    scheduleDeferredTask([this] {
        if (m_player)
            m_player->rateChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::setVolume(%p)", this);

    if (m_volume == volume)
        return;

    m_volume = volume;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
{
    LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::setMuted(%p)", this);

    if (muted == m_muted)
        return;

    m_muted = muted;
    for (const auto& track : m_audioTrackMap.values())
        track->setVolume(m_muted ? 0 : m_volume);
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasVideo();
}

bool MediaPlayerPrivateMediaStreamAVFObjC::hasAudio() const
{
    if (!metaDataAvailable())
        return false;

    return m_mediaStreamPrivate->hasAudio();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    m_visible = visible;
    if (m_visible)
        flushRenderers();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::durationMediaTime() const
{
    return MediaTime::positiveInfiniteTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
{
    if (paused())
        return m_pausedTime;

    return streamTime();
}

MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
{
    return MediaTime::createWithDouble(m_clock->currentTime());
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::readyState() const
{
    return m_readyState;
}

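// Readiness rules: an inactive or empty stream is HaveNothing; if the active
// video track has not yet produced a frame we stay at HaveNothing until
// metadata has been seen; a disabled or non-live track yields HaveMetadata
// the first time through; once metadata has been seen we report
// HaveEnoughData.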
MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState()
{
    if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->active() || !m_mediaStreamPrivate->tracks().size())
        return MediaPlayer::ReadyState::HaveNothing;

    bool allTracksAreLive = true;
    for (auto& track : m_mediaStreamPrivate->tracks()) {
        if (!track->enabled() || track->readyState() != MediaStreamTrackPrivate::ReadyState::Live)
            allTracksAreLive = false;

        if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
            if (!m_haveSeenMetadata)
                return MediaPlayer::ReadyState::HaveNothing;
            allTracksAreLive = false;
        }
    }

    if (!allTracksAreLive && !m_haveSeenMetadata)
        return MediaPlayer::ReadyState::HaveMetadata;

    return MediaPlayer::ReadyState::HaveEnoughData;
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateReadyState()
{
    MediaPlayer::ReadyState newReadyState = currentReadyState();

    if (newReadyState != m_readyState)
        setReadyState(newReadyState);
}

void MediaPlayerPrivateMediaStreamAVFObjC::activeStatusChanged()
{
    scheduleDeferredTask([this] {
        bool ended = !m_mediaStreamPrivate->active();
        if (ended && playing())
            pause();

        updateReadyState();
        updateDisplayMode();

        if (ended != m_ended) {
            m_ended = ended;
            if (m_player) {
                m_player->timeChanged();
                m_player->characteristicChanged();
            }
        }
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode()
{
    if (!updateDisplayMode())
        return;

    scheduleDeferredTask([this] {
        m_transformIsValid = false;
        if (m_player)
            m_player->client().mediaPlayerRenderingModeChanged(m_player);
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
{
    bool sizeChanged = false;

    FloatSize intrinsicSize = m_mediaStreamPrivate->intrinsicSize();
    if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
        m_intrinsicSize = intrinsicSize;
        sizeChanged = true;
    }

    updateTracks();
    updateDisplayMode();

    scheduleDeferredTask([this, sizeChanged] {
        updateReadyState();

        if (!m_player)
            return;

        m_player->characteristicChanged();
        if (sizeChanged)
            m_player->sizeChanged();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::didAddTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::didRemoveTrack(MediaStreamTrackPrivate&)
{
    updateTracks();
}

void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
{
    ASSERT(track.id() == mediaSample.trackID());
    ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
    ASSERT(m_mediaStreamPrivate);

    if (streamTime().toDouble() < 0)
        return;

    switch (track.type()) {
    case RealtimeMediaSource::Type::None:
        // Do nothing.
        break;
    case RealtimeMediaSource::Type::Audio:
        break;
    case RealtimeMediaSource::Type::Video:
        if (&track == m_activeVideoTrack.get())
            enqueueVideoSample(track, mediaSample);
        break;
    }
}

void MediaPlayerPrivateMediaStreamAVFObjC::readyStateChanged(MediaStreamTrackPrivate&)
{
    scheduleDeferredTask([this] {
        updateReadyState();
    });
}

bool MediaPlayerPrivateMediaStreamAVFObjC::supportsPictureInPicture() const
{
#if PLATFORM(IOS)
    for (const auto& track : m_videoTrackMap.values()) {
        if (track->streamTrack().isCaptureTrack())
            return false;
    }
#endif

    return true;
}

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));
}

void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
#endif

typedef enum {
    Add,
    Remove,
    Configure
} TrackState;

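// Generic diff between the stream's current tracks and our track map: tracks
// in the stream but not the map are added, tracks in the map but gone from
// the stream are removed, and every surviving track is (re)configured, with
// the callback told which state transition it is handling.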
template <typename RefT>
void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), const Function<void(RefT, int, TrackState)>& configureTrack)
{
    Vector<RefT> removedTracks;
    Vector<RefT> addedTracks;
    Vector<RefPtr<MediaStreamTrackPrivate>> addedPrivateTracks;

    for (const auto& track : currentTracks) {
        if (track->type() != trackType)
            continue;

        if (!trackMap.contains(track->id()))
            addedPrivateTracks.append(track);
    }

    for (const auto& track : trackMap.values()) {
        auto& streamTrack = track->streamTrack();
        if (currentTracks.contains(&streamTrack))
            continue;

        removedTracks.append(track);
        trackMap.remove(streamTrack.id());
    }

    for (auto& track : addedPrivateTracks) {
        RefT newTrack = itemFactory(*track.get());
        trackMap.add(track->id(), newTrack);
        addedTracks.append(newTrack);
    }

    int index = 0;
    for (auto& track : removedTracks)
        configureTrack(track, index++, TrackState::Remove);

    index = 0;
    for (auto& track : addedTracks)
        configureTrack(track, index++, TrackState::Add);

    index = 0;
    for (const auto& track : trackMap.values())
        configureTrack(track, index++, TrackState::Configure);
}

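// Coalesce selected-track checks: multiple calls before the deferred task
// runs collapse into a single pass that re-resolves the active video track
// and shows or hides the display layer accordingly.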
void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
{
    if (m_pendingSelectedTrackCheck)
        return;

    m_pendingSelectedTrackCheck = true;
    scheduleDeferredTask([this] {
        auto oldVideoTrack = m_activeVideoTrack;
        bool hideVideoLayer = true;
        m_activeVideoTrack = nullptr;
        if (m_mediaStreamPrivate->activeVideoTrack()) {
            for (const auto& track : m_videoTrackMap.values()) {
                if (&track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack()) {
                    m_activeVideoTrack = m_mediaStreamPrivate->activeVideoTrack();
                    if (track->selected())
                        hideVideoLayer = false;
                    break;
                }
            }
        }

        if (oldVideoTrack != m_activeVideoTrack)
            m_imagePainter.reset();
        ensureLayers();
        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
        m_pendingSelectedTrackCheck = false;
        updateDisplayMode();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
    MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();

    Function<void(RefPtr<AudioTrackPrivateMediaStreamCocoa>, int, TrackState)> setAudioTrackState = [this](auto track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            m_player->removeAudioTrack(*track);
            break;
        case TrackState::Add:
            track->streamTrack().addObserver(*this);
            m_player->addAudioTrack(*track);
            break;
        case TrackState::Configure:
            track->setTrackIndex(index);
            bool enabled = track->streamTrack().enabled() && !track->streamTrack().muted();
            track->setEnabled(enabled);
            break;
        }
    };
    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Type::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, setAudioTrackState);

    Function<void(RefPtr<VideoTrackPrivateMediaStream>, int, TrackState)> setVideoTrackState = [&](auto track, int index, TrackState state)
    {
        switch (state) {
        case TrackState::Remove:
            track->streamTrack().removeObserver(*this);
            m_player->removeVideoTrack(*track);
            checkSelectedVideoTrack();
            break;
        case TrackState::Add:
            track->streamTrack().addObserver(*this);
            m_player->addVideoTrack(*track);
            break;
        case TrackState::Configure:
            track->setTrackIndex(index);
            bool selected = &track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack();
            track->setSelected(selected);
            checkSelectedVideoTrack();
            break;
        }
    };
    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Type::Video, currentTracks, &VideoTrackPrivateMediaStream::create, setVideoTrackState);
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::buffered() const
{
    return std::make_unique<PlatformTimeRanges>();
}

void MediaPlayerPrivateMediaStreamAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}

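// Convert the most recent video sample to a CGImage for software painting,
// lazily creating a conformer that maps the pixel buffer to 32BGRA.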
void MediaPlayerPrivateMediaStreamAVFObjC::updateCurrentFrameImage()
{
    if (m_imagePainter.cgImage || !m_imagePainter.mediaSample)
        return;

    if (!m_imagePainter.pixelBufferConformer)
        m_imagePainter.pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)@{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });

    ASSERT(m_imagePainter.pixelBufferConformer);
    if (!m_imagePainter.pixelBufferConformer)
        return;

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_imagePainter.mediaSample->platformSample().sample.cmSampleBuffer));
    m_imagePainter.cgImage = m_imagePainter.pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer);
}

void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (m_displayMode == None || !metaDataAvailable() || context.paintingDisabled())
        return;

    if (m_displayMode != PaintItBlack && m_imagePainter.mediaSample)
        updateCurrentFrameImage();

    GraphicsContextStateSaver stateSaver(context);
    if (m_displayMode == PaintItBlack || !m_imagePainter.cgImage || !m_imagePainter.mediaSample) {
        context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
        return;
    }

    auto image = m_imagePainter.cgImage.get();
    FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
    AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
    FloatRect transformedDestRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(destRect);
    context.concatCTM(videoTransform);
    context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
}

void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player))
        ensureLayers();
    else
        destroyLayers();
}

String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaStream Engine"));
    return description;
}

void MediaPlayerPrivateMediaStreamAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    if (readyState != MediaPlayer::ReadyState::HaveNothing)
        m_haveSeenMetadata = true;
    m_readyState = readyState;
    characteristicsChanged();

    m_player->readyStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}

void MediaPlayerPrivateMediaStreamAVFObjC::setShouldBufferData(bool shouldBuffer)
{
    if (!shouldBuffer)
        flushAndRemoveVideoSampleBuffers();
}

void MediaPlayerPrivateMediaStreamAVFObjC::scheduleDeferredTask(Function<void ()>&& function)
{
    ASSERT(function);
    callOnMainThread([weakThis = createWeakPtr(), function = WTFMove(function)] {
        if (!weakThis)
            return;

        function();
    });
}

void MediaPlayerPrivateMediaStreamAVFObjC::CurrentFramePainter::reset()
{
    cgImage = nullptr;
    mediaSample = nullptr;
    pixelBufferConformer = nullptr;
}

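// Size the display layer to match the background layer, swapping width and
// height when the video is rotated 90 or 270 degrees, and keep it centered.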
void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
{
    if (!m_backgroundLayer || !m_sampleBufferDisplayLayer)
        return;

    auto backgroundBounds = m_backgroundLayer.get().bounds;
    auto videoBounds = backgroundBounds;
    if (m_videoRotation == MediaSample::VideoRotation::Right || m_videoRotation == MediaSample::VideoRotation::Left)
        std::swap(videoBounds.size.width, videoBounds.size.height);

    m_sampleBufferDisplayLayer.get().bounds = videoBounds;
    m_sampleBufferDisplayLayer.get().position = { backgroundBounds.size.width / 2, backgroundBounds.size.height / 2 };
}

void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
{
    scheduleDeferredTask([this] {
        runWithoutAnimations([this] {
            updateDisplayLayer();
        });
    });
}

}

#endif