[MediaStream] A stream's first video frame should be rendered
author: eric.carlson@apple.com <eric.carlson@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 18 Dec 2018 05:31:22 +0000 (05:31 +0000)
committer: eric.carlson@apple.com <eric.carlson@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 18 Dec 2018 05:31:22 +0000 (05:31 +0000)
https://bugs.webkit.org/show_bug.cgi?id=192629
<rdar://problem/46664353>

Reviewed by Youenn Fablet.

Source/WebCore:

Test: fast/mediastream/media-stream-renders-first-frame.html

* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode const):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext):
* platform/mediastream/RealtimeMediaSource.cpp:
(WebCore::RealtimeMediaSource::size const):
* platform/mediastream/mac/AVVideoCaptureSource.mm:
(WebCore::AVVideoCaptureSource::processNewFrame):
* platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm:
(WebCore::RealtimeIncomingVideoSourceCocoa::processNewSample):

LayoutTests:

* fast/mediastream/MediaStream-video-element-displays-buffer.html: Updated.
* fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled-expected.txt: Ditto.
* fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled.html: Ditto.
* fast/mediastream/media-stream-renders-first-frame-expected.txt: Added.
* fast/mediastream/media-stream-renders-first-frame.html: Added.
* http/tests/media/media-stream/getusermedia-with-canvas-expected.txt: Removed.
* http/tests/media/media-stream/getusermedia-with-canvas.html: Removed.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@239319 268f45cc-cd09-0410-ab3c-d52691b4dbfc

16 files changed:
LayoutTests/ChangeLog
LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer.html
LayoutTests/fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled-expected.txt
LayoutTests/fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled.html
LayoutTests/fast/mediastream/media-stream-renders-first-frame-expected.txt [new file with mode: 0644]
LayoutTests/fast/mediastream/media-stream-renders-first-frame.html [new file with mode: 0644]
LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas-expected.txt [deleted file]
LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas.html [deleted file]
LayoutTests/platform/gtk/TestExpectations
Source/WebCore/ChangeLog
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm
Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp
Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h
Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm

index ad6427e..7684fdc 100644 (file)
@@ -1,3 +1,19 @@
+2018-12-17  Eric Carlson  <eric.carlson@apple.com>
+
+        [MediaStream] A stream's first video frame should be rendered
+        https://bugs.webkit.org/show_bug.cgi?id=192629
+        <rdar://problem/46664353>
+
+        Reviewed by Youenn Fablet.
+
+        * fast/mediastream/MediaStream-video-element-displays-buffer.html: Updated.
+        * fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled-expected.txt: Ditto.
+        * fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled.html: Ditto.
+        * fast/mediastream/media-stream-renders-first-frame-expected.txt: Added.
+        * fast/mediastream/media-stream-renders-first-frame.html: Added.
+        * http/tests/media/media-stream/getusermedia-with-canvas-expected.txt: Removed.
+        * http/tests/media/media-stream/getusermedia-with-canvas.html: Removed.
+
 2018-12-17  Wenson Hsieh  <wenson_hsieh@apple.com>
 
         [iOS] Focusing a large editable element always scrolls to the top of the element
index 1fc7088..c156d43 100644 (file)
@@ -2,7 +2,6 @@
 <html>
 <head>
     <script src="../../resources/js-test-pre.js"></script>
-    <script src="./resources/getUserMedia-helper.js"></script>
 </head>
 <body onload="start()">
 <p id="description"></p>
     {
         debug(`<br> === checking pixels from ${!currentTest ? "front" : "back"} camera ===`);
         let constraints = {video : !currentTest ? true : {facingMode: "environment"}};
-        getUserMedia("allow", constraints, setupVideoElement);
+        navigator.mediaDevices.getUserMedia(constraints).then(setupVideoElement);
     }
     
     function start()
     {
         description("Tests that the stream displays captured buffers to the video element.");
+        if (window.testRunner)
+            testRunner.setUserMediaPermission(true);
 
         videos = Array.from(document.getElementsByTagName('video'));
         videos.forEach((video) => {
index 012ecbb..50860e6 100644 (file)
@@ -7,13 +7,13 @@ PASS mediaDevices.getUserMedia generated a stream successfully.
 video.srcObject = mediaStream
 
  === beginning round of pixel tests ===
-PASS pixel was black.
+PASS pixel was white.
 
  === all video tracks disabled ===
 PASS pixel was black.
 
- === video track reenabled, should NOT render current frame ===
-PASS pixel was black.
+ === video track reenabled, should render current frame ===
+PASS pixel was white.
 
  ===== play video =====
 video.play()
index 9e21d37..1428cb2 100644 (file)
@@ -2,7 +2,6 @@
 <html>
 <head>
     <script src="../../resources/js-test-pre.js"></script>
-    <script src="./resources/getUserMedia-helper.js"></script>
 </head>
 <body onload="start()">
 <p id="description"></p>
@@ -14,7 +13,6 @@
     let context;
     let mediaStream;
     let video;
-    let havePlayed = false;
     
     let buffer;
 
@@ -35,7 +33,7 @@
 
     function canvasShouldBeBlack()
     {
-        return !(mediaStream.getVideoTracks()[0].enabled && havePlayed);
+        return !mediaStream.getVideoTracks()[0].enabled;
     }
     
     function attempt(numberOfTries, call, callback)
@@ -61,7 +59,6 @@
         if (video.paused) {
             debug('<br> ===== play video =====');
             evalAndLog('video.play()');
-            havePlayed = true;
             beginTestRound();
         } else {
             debug('');
@@ -73,7 +70,7 @@
     function reenableTrack()
     {
         mediaStream.getVideoTracks()[0].enabled = true;
-        debug(`<br> === video track reenabled, should${havePlayed ? "" : " NOT"} render current frame ===`);
+        debug(`<br> === video track reenabled, should render current frame ===`);
 
         // The video is not guaranteed to render non-black frames before the canvas is drawn to and the pixels are checked.
         // A timeout is used to ensure that the pixel check is done after the video renders non-black frames.
     function start()
     {
         description("Tests that re-enabling a video MediaStreamTrack when all tracks were previously disabled causes captured media to display.");
+        if (window.testRunner)
+            testRunner.setUserMediaPermission(true);
 
         video = document.querySelector('video');
         video.addEventListener('canplay', canplay);
 
-        getUserMedia("allow", {video:true}, setupVideoElementWithStream);
+        navigator.mediaDevices.getUserMedia({ video : true })
+            .then((stream) => {
+                mediaStream = stream;
+                testPassed('mediaDevices.getUserMedia generated a stream successfully.');
+                evalAndLog('video.srcObject = mediaStream');
+            });
     }
 
     window.jsTestIsAsync = true;
diff --git a/LayoutTests/fast/mediastream/media-stream-renders-first-frame-expected.txt b/LayoutTests/fast/mediastream/media-stream-renders-first-frame-expected.txt
new file mode 100644 (file)
index 0000000..f92f3ab
--- /dev/null
@@ -0,0 +1,4 @@
+
+
+PASS A frame from the camera is shown before playback begins. 
+
diff --git a/LayoutTests/fast/mediastream/media-stream-renders-first-frame.html b/LayoutTests/fast/mediastream/media-stream-renders-first-frame.html
new file mode 100644 (file)
index 0000000..fa7d39f
--- /dev/null
@@ -0,0 +1,62 @@
+<!DOCTYPE html>
+<html>
+    <head>
+        <video id="video" width=480px height=480px controls ></video>
+        <canvas id="canvas" width=640px height=480px></canvas>
+        <script src="../../resources/testharness.js"></script>
+        <script src="../../resources/testharnessreport.js"></script>
+        <script>
+
+const canvas = document.getElementById("canvas");
+const video = document.getElementById("video");
+
+function isPixelBlack(pixel)
+{
+    return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 255;
+}
+
+function logPixel(name, pixel)
+{
+    console.log(`${name}: ${pixel[0]}, ${pixel[1]}, ${pixel[2]}, ${pixel[3]}`);
+}
+
+function checkCanvas(canvas, stream)
+{
+    return new Promise((resolve, reject) => {
+        video.srcObject = stream;
+        video.oncanplay = () => {
+            const ctx = canvas.getContext("2d");
+            ctx.drawImage(video, 0 ,0);
+
+            try {
+                setTimeout(() => {
+                    assert_false(isPixelBlack(ctx.getImageData(5, 5, 1, 1).data), "Pixel at 5x5 is not black.");
+                    assert_false(isPixelBlack(ctx.getImageData(50, 200, 1, 1).data), "Pixel at 50x200 is not black.");
+                    resolve();
+                }, 500);
+            } catch(err) {
+                reject(err);
+                return;
+            }
+        }
+    });
+}
+
+promise_test(async () => {
+    let stream = await navigator.mediaDevices.getUserMedia({ video: true });
+    stream = null;
+
+    const devices = await navigator.mediaDevices.enumerateDevices();
+    let cameraID = undefined;
+    devices.forEach(device => { if (device.label == "Mock video device 2") cameraID = device.deviceId; });
+    assert_true(cameraID !== undefined, "Found camera2");
+    
+    stream = await navigator.mediaDevices.getUserMedia({ video: { deviceId: { exact: cameraID } } });
+    
+    return checkCanvas(canvas, stream);
+
+}, "A frame from the camera is shown before playback begins.");
+
+        </script>
+    </head>
+</html>
diff --git a/LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas-expected.txt b/LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas-expected.txt
deleted file mode 100644 (file)
index 012ecbb..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-Tests that re-enabling a video MediaStreamTrack when all tracks were previously disabled causes captured media to display.
-
-On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
-
-
-PASS mediaDevices.getUserMedia generated a stream successfully.
-video.srcObject = mediaStream
-
- === beginning round of pixel tests ===
-PASS pixel was black.
-
- === all video tracks disabled ===
-PASS pixel was black.
-
- === video track reenabled, should NOT render current frame ===
-PASS pixel was black.
-
- ===== play video =====
-video.play()
-
- === beginning round of pixel tests ===
-PASS pixel was white.
-
- === all video tracks disabled ===
-PASS pixel was black.
-
- === video track reenabled, should render current frame ===
-PASS pixel was white.
-
-PASS successfullyParsed is true
-
-TEST COMPLETE
diff --git a/LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas.html b/LayoutTests/http/tests/media/media-stream/getusermedia-with-canvas.html
deleted file mode 100644 (file)
index 69ae60d..0000000
+++ /dev/null
@@ -1,137 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head>
-    <script src="/resources/js-test-pre.js"></script>
-    <script src="./resources/getUserMedia-helper.js"></script>
-</head>
-<body onload="start()">
-<p id="description"></p>
-<div id="console"></div>
-<video controls width="680" height="360"></video>
-<canvas width="680" height="360"></canvas>
-<script>
-    let canvas;
-    let context;
-    let mediaStream;
-    let video;
-    let havePlayed = false;
-
-    let buffer;
-
-    function isPixelBlack(pixel)
-    {
-        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 255;
-    }
-
-    function isPixelTransparent(pixel)
-    {
-        return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 0;
-    }
-
-    function isPixelWhite(pixel)
-    {
-        return pixel[0] === 255 && pixel[1] === 255 && pixel[2] === 255 && pixel[3] === 255;
-    }
-
-    function canvasShouldBeBlack()
-    {
-        return !(mediaStream.getVideoTracks()[0].enabled && havePlayed);
-    }
-
-    function attempt(numberOfTries, call, callback)
-    {
-        if (numberOfTries <= 0) {
-            testFailed('Pixel check did not succeed after multiple tries.');
-            return;
-        }
-
-        let attemptSucceeded = call();
-        if (attemptSucceeded) {
-            testPassed(canvasShouldBeBlack() ? 'pixel was black.' : 'pixel was white.');
-            callback();
-
-            return;
-        }
-
-        setTimeout(() => { attempt(--numberOfTries, call, callback); }, 50);
-    }
-
-    function repeatWithVideoPlayingAndFinishTest()
-    {
-        if (video.paused) {
-            debug('<br> ===== play video =====');
-            evalAndLog('video.play()');
-            havePlayed = true;
-            beginTestRound();
-        } else {
-            debug('');
-            video.pause();
-            finishJSTest();
-        }
-    }
-
-    function reenableTrack()
-    {
-        mediaStream.getVideoTracks()[0].enabled = true;
-        debug(`<br> === video track reenabled, should${havePlayed ? "" : " NOT"} render current frame ===`);
-
-        // The video is not guaranteed to render non-black frames before the canvas is drawn to and the pixels are checked.
-        // A timeout is used to ensure that the pixel check is done after the video renders non-black frames.
-        attempt(10, checkPixels, repeatWithVideoPlayingAndFinishTest);
-    }
-
-    function checkPixels()
-    {
-        context.clearRect(0, 0, canvas.width, canvas.height);
-        buffer = context.getImageData(30, 242, 1, 1).data;
-        if (!isPixelTransparent(buffer))
-            testFailed('pixel was not transparent after clearing canvas.');
-
-        context.drawImage(video, 0, 0, canvas.width, canvas.height);
-        buffer = context.getImageData(30, 242, 1, 1).data;
-
-        if (!canvasShouldBeBlack())
-            return isPixelWhite(buffer);
-        else
-            return isPixelBlack(buffer);
-    }
-
-    function disableAllTracks()
-    {
-        mediaStream.getVideoTracks()[0].enabled = false;
-        debug('<br> === all video tracks disabled ===');
-
-        // The video is not guaranteed to render black frames before the canvas is drawn to and the pixels are checked.
-        // A timeout is used to ensure that the pixel check is done after the video renders black frames.
-        attempt(10, checkPixels, reenableTrack);
-    }
-
-    function beginTestRound()
-    {
-        debug('<br> === beginning round of pixel tests ===');
-        attempt(10, checkPixels, disableAllTracks);
-    }
-
-    function canplay()
-    {
-        canvas = document.querySelector('canvas');
-        context = canvas.getContext('2d');
-
-        beginTestRound();
-    }
-
-    function start()
-    {
-        description("Tests that re-enabling a video MediaStreamTrack when all tracks were previously disabled causes captured media to display.");
-
-        video = document.querySelector('video');
-        video.addEventListener('canplay', canplay);
-
-        getUserMedia("allow", {video:true}, setupVideoElementWithStream);
-    }
-
-    window.jsTestIsAsync = true;
-</script>
-<script src="/resources/js-test-post.js"></script>
-</body>
-</html>
index 2aad6a7..992d571 100644 (file)
@@ -610,7 +610,6 @@ webkit.org/b/79203 webaudio/mediastreamaudiosourcenode.html [ Failure ]
 webkit.org/b/79203 fast/mediastream/MediaStream-video-element-video-tracks-disabled-then-enabled.html [ Timeout Failure ]
 webkit.org/b/79203 fast/mediastream/MediaStream-video-element-displays-buffer.html [ Failure ]
 webkit.org/b/79203 fast/mediastream/RTCPeerConnection-stats.html [ Timeout Crash ]
-webkit.org/b/79203 http/tests/media/media-stream/getusermedia-with-canvas.html [ Timeout ]
 webkit.org/b/79203 imported/w3c/web-platform-tests/mediacapture-streams/MediaStream-MediaElement-preload-none.https.html [ Failure Pass ]
 webkit.org/b/79203 imported/w3c/web-platform-tests/mediacapture-streams/MediaStreamTrack-end-manual.https.html [ Failure ]
 webkit.org/b/151344 fast/mediastream/MediaStream-add-ended-tracks.html [ Timeout ]
index fb221e0..6ff69a5 100644 (file)
@@ -1,3 +1,31 @@
+2018-12-17  Eric Carlson  <eric.carlson@apple.com>
+
+        [MediaStream] A stream's first video frame should be rendered
+        https://bugs.webkit.org/show_bug.cgi?id=192629
+        <rdar://problem/46664353>
+
+        Reviewed by Youenn Fablet.
+
+        Test: fast/mediastream/media-stream-renders-first-frame.html
+
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode const):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext):
+        * platform/mediastream/RealtimeMediaSource.cpp:
+        (WebCore::RealtimeMediaSource::size const):
+        * platform/mediastream/mac/AVVideoCaptureSource.mm:
+        (WebCore::AVVideoCaptureSource::processNewFrame):
+        * platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm:
+        (WebCore::RealtimeIncomingVideoSourceCocoa::processNewSample):
+
 2018-12-17  Justin Michaud  <justin_michaud@apple.com>
 
         Bindings generator should support Conditional= along with CachedAttribute
index 6bb1016..7959411 100644 (file)
@@ -189,6 +189,7 @@ private:
     enum DisplayMode {
         None,
         PaintItBlack,
+        WaitingForFirstImage,
         PausedImage,
         LivePreview,
     };
@@ -229,6 +230,8 @@ private:
 
     void applicationDidBecomeActive() final;
 
+    bool hideBackgroundLayer() const { return (!m_activeVideoTrack || m_waitingForFirstImage) && m_displayMode != PaintItBlack; }
+
     MediaPlayer* m_player { nullptr };
     RefPtr<MediaStreamPrivate> m_mediaStreamPrivate;
     RefPtr<MediaStreamTrackPrivate> m_activeVideoTrack;
@@ -274,10 +277,10 @@ private:
     bool m_ended { false };
     bool m_hasEverEnqueuedVideoFrame { false };
     bool m_pendingSelectedTrackCheck { false };
-    bool m_shouldDisplayFirstVideoFrame { false };
     bool m_transformIsValid { false };
     bool m_visible { false };
     bool m_haveSeenMetadata { false };
+    bool m_waitingForFirstImage { false };
 };
     
 }
index da6b8d4..1b07b4e 100644 (file)
@@ -165,6 +165,14 @@ using namespace WebCore;
 
     if ((CALayer *)object == _parent->backgroundLayer()) {
         if ([keyPath isEqualToString:@"bounds"]) {
+            if (!_parent)
+                return;
+
+            if (isMainThread()) {
+                _parent->backgroundLayerBoundsChanged();
+                return;
+            }
+
             callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
                 if (!protectedSelf->_parent)
                     return;
@@ -363,7 +371,7 @@ void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPr
             updateReadyState();
     }
 
-    if (m_displayMode != LivePreview || (m_displayMode == PausedImage && m_imagePainter.mediaSample))
+    if (m_displayMode != LivePreview && !m_waitingForFirstImage)
         return;
 
     auto videoTrack = m_videoTrackMap.get(track.id());
@@ -391,6 +399,10 @@ void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPr
     }
 
     enqueueCorrectedVideoSample(sample);
+    if (m_waitingForFirstImage) {
+        m_waitingForFirstImage = false;
+        updateDisplayMode();
+    }
 }
 
 void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
@@ -481,9 +493,14 @@ void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
     m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;
 
     m_backgroundLayer = adoptNS([[CALayer alloc] init]);
+    m_backgroundLayer.get().hidden = hideBackgroundLayer();
+
     m_backgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
     m_backgroundLayer.get().needsDisplayOnBoundsChange = YES;
 
+    auto size = snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size();
+    m_backgroundLayer.get().bounds = CGRectMake(0, 0, size.width(), size.height());
+
     [m_statusChangeListener beginObservingLayers];
 
     [m_backgroundLayer addSublayer:m_sampleBufferDisplayLayer.get()];
@@ -496,7 +513,7 @@ void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers()
     updateRenderingMode();
     updateDisplayLayer();
 
-    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
+    m_videoFullscreenLayerManager->setVideoLayer(m_backgroundLayer.get(), size);
 }
 
 void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
@@ -601,6 +618,9 @@ MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamA
             return PaintItBlack;
     }
 
+    if (m_waitingForFirstImage)
+        return WaitingForFirstImage;
+
     if (playing() && !m_ended) {
         if (!m_mediaStreamPrivate->isProducingData())
             return PausedImage;
@@ -623,9 +643,16 @@ bool MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode()
     INFO_LOG(LOGIDENTIFIER, "updated to ", static_cast<int>(displayMode));
     m_displayMode = displayMode;
 
-    if (m_sampleBufferDisplayLayer) {
-        runWithoutAnimations([this] {
-            m_sampleBufferDisplayLayer.get().hidden = m_displayMode < PausedImage;
+    auto hidden = m_displayMode < PausedImage;
+    if (m_sampleBufferDisplayLayer && m_sampleBufferDisplayLayer.get().hidden != hidden) {
+        runWithoutAnimations([this, hidden] {
+            m_sampleBufferDisplayLayer.get().hidden = hidden;
+        });
+    }
+    hidden = hideBackgroundLayer();
+    if (m_backgroundLayer && m_backgroundLayer.get().hidden != hidden) {
+        runWithoutAnimations([this, hidden] {
+            m_backgroundLayer.get().hidden = hidden;
         });
     }
 
@@ -646,7 +673,6 @@ void MediaPlayerPrivateMediaStreamAVFObjC::play()
     for (const auto& track : m_audioTrackMap.values())
         track->play();
 
-    m_shouldDisplayFirstVideoFrame = true;
     updateDisplayMode();
 
     scheduleDeferredTask([this] {
@@ -765,13 +791,13 @@ MediaPlayer::ReadyState MediaPlayerPrivateMediaStreamAVFObjC::currentReadyState(
             allTracksAreLive = false;
 
         if (track == m_mediaStreamPrivate->activeVideoTrack() && !m_imagePainter.mediaSample) {
-            if (!m_haveSeenMetadata)
+            if (!m_haveSeenMetadata || m_waitingForFirstImage)
                 return MediaPlayer::ReadyState::HaveNothing;
             allTracksAreLive = false;
         }
     }
 
-    if (!allTracksAreLive && !m_haveSeenMetadata)
+    if (m_waitingForFirstImage || (!allTracksAreLive && !m_haveSeenMetadata))
         return MediaPlayer::ReadyState::HaveMetadata;
 
     return MediaPlayer::ReadyState::HaveEnoughData;
@@ -828,6 +854,8 @@ void MediaPlayerPrivateMediaStreamAVFObjC::characteristicsChanged()
     if (intrinsicSize.height() != m_intrinsicSize.height() || intrinsicSize.width() != m_intrinsicSize.width()) {
         m_intrinsicSize = intrinsicSize;
         sizeChanged = true;
+        if (m_playbackState == PlaybackState::None)
+            m_playbackState = PlaybackState::Paused;
     }
 
     updateTracks();
@@ -979,10 +1007,15 @@ void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
             }
         }
 
-        if (oldVideoTrack != m_activeVideoTrack)
+        if (oldVideoTrack != m_activeVideoTrack) {
             m_imagePainter.reset();
+            if (m_displayMode == None)
+                m_waitingForFirstImage = true;
+        }
         ensureLayers();
         m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer || m_displayMode < PausedImage;
+        m_backgroundLayer.get().hidden = hideBackgroundLayer();
+
         m_pendingSelectedTrackCheck = false;
         updateDisplayMode();
     });
@@ -1075,11 +1108,14 @@ void MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext(GraphicsCo
         updateCurrentFrameImage();
 
     GraphicsContextStateSaver stateSaver(context);
-    if (m_displayMode == PaintItBlack || !m_imagePainter.cgImage || !m_imagePainter.mediaSample) {
+    if (m_displayMode == PaintItBlack) {
         context.fillRect(IntRect(IntPoint(), IntSize(destRect.width(), destRect.height())), Color::black);
         return;
     }
 
+    if (!m_imagePainter.cgImage || !m_imagePainter.mediaSample)
+        return;
+
     auto image = m_imagePainter.cgImage.get();
     FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
     AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
@@ -1164,10 +1200,8 @@ void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
 
 void MediaPlayerPrivateMediaStreamAVFObjC::backgroundLayerBoundsChanged()
 {
-    scheduleDeferredTask([this] {
-        runWithoutAnimations([this] {
-            updateDisplayLayer();
-        });
+    runWithoutAnimations([this] {
+        updateDisplayLayer();
     });
 }
 
index 3bac933..2e7f731 100644 (file)
@@ -874,7 +874,9 @@ const IntSize RealtimeMediaSource::size() const
     auto size = m_size;
 
     if (size.isEmpty() && !m_intrinsicSize.isEmpty()) {
-        if (size.width())
+        if (size.isZero())
+            size = m_intrinsicSize;
+        else if (size.width())
             size.setHeight(size.width() * (m_intrinsicSize.height() / static_cast<double>(m_intrinsicSize.width())));
         else if (size.height())
             size.setWidth(size.height() * (m_intrinsicSize.width() / static_cast<double>(m_intrinsicSize.height())));
index a83d661..a4767c4 100644 (file)
@@ -126,6 +126,7 @@ private:
     IntSize m_pendingSize;
     double m_pendingFrameRate;
     InterruptionReason m_interruption { InterruptionReason::None };
+    int m_framesToDropAtStartup { 0 };
     bool m_isRunning { false };
 };
 
index 9410587..7591746 100644 (file)
@@ -524,11 +524,15 @@ void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
         return;
 
     m_buffer = &sample.get();
+    setIntrinsicSize(expandedIntSize(sample->presentationSize()));
     dispatchMediaSampleToObservers(WTFMove(sample));
 }
 
 void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
 {
+    if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
+        return;
+
     auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
     scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
         processNewFrame(WTFMove(sample));
@@ -542,6 +546,9 @@ void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
             return;
 
         m_isRunning = state;
+        if (m_isRunning)
+            m_framesToDropAtStartup = 4;
+
         notifyMutedChange(!m_isRunning);
     });
 }
@@ -573,7 +580,7 @@ void AVVideoCaptureSource::generatePresets()
     for (AVCaptureDeviceFormatType* format in [device() formats]) {
 
         CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-        IntSize size = {dimensions.width, dimensions.height};
+        IntSize size = { dimensions.width, dimensions.height };
         auto index = presets.findMatching([&size](auto& preset) {
             return size == preset->size;
         });
index 2f80e14..b09ac04 100644 (file)
@@ -229,7 +229,7 @@ void RealtimeIncomingVideoSourceCocoa::processNewSample(CMSampleBufferRef sample
     m_buffer = sample;
     auto size = this->size();
     if (WTF::safeCast<int>(width) != size.width() || WTF::safeCast<int>(height) != size.height())
-        setSize(IntSize(width, height));
+        setIntrinsicSize(IntSize(width, height));
 
     videoSampleAvailable(MediaSampleAVFObjC::create(sample, rotation));
 }