[MediaStream] Allow ports to optionally do screen capture in the UI process
author: eric.carlson@apple.com <eric.carlson@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 18 Oct 2018 22:52:15 +0000 (22:52 +0000)
committer: eric.carlson@apple.com <eric.carlson@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Thu, 18 Oct 2018 22:52:15 +0000 (22:52 +0000)
https://bugs.webkit.org/show_bug.cgi?id=190728
<rdar://problem/45376824>

Reviewed by Jer Noble and Tim Horton.

Source/WebCore:

No new tests, covered by existing tests.

* Sources.txt: Add RemoteVideoSample.cpp.
* WebCore.xcodeproj/project.pbxproj: Ditto.

* platform/MediaSample.h:
(WebCore::MediaSample::videoPixelFormat const):

* platform/graphics/RemoteVideoSample.cpp: Added.
(WebCore::RemoteVideoSample::~RemoteVideoSample):
(WebCore::RemoteVideoSample::create):
(WebCore::RemoteVideoSample::RemoteVideoSample):
(WebCore::RemoteVideoSample::surface):
* platform/graphics/RemoteVideoSample.h: Added.
(WebCore::RemoteVideoSample::time const):
(WebCore::RemoteVideoSample::videoFormat const):
(WebCore::RemoteVideoSample::size const):
(WebCore::RemoteVideoSample::encode const):
(WebCore::RemoteVideoSample::decode):
* platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h:
* platform/graphics/cv/ImageTransferSessionVT.h:

* platform/mediastream/RealtimeMediaSource.cpp:
(WebCore::RealtimeMediaSource::remoteVideoSampleAvailable): Call observers.
* platform/mediastream/RealtimeMediaSource.h:

* platform/mediastream/RealtimeVideoSource.cpp:
(WebCore::RealtimeVideoSource::dispatchMediaSampleToObservers): Dispatch remote samples without
resizing, resize local samples if necessary.
* platform/mediastream/RealtimeVideoSource.h:

* platform/mediastream/mac/AVVideoCaptureSource.mm:
(WebCore::AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection): Don't resize
samples, it will be done in the base class if necessary.

* platform/mediastream/mac/DisplayCaptureSourceCocoa.cpp:
(WebCore::DisplayCaptureSourceCocoa::emitFrame): Don't resize samples when running in the UI
process, it will be done in the web process.

* platform/mediastream/mac/RealtimeOutgoingVideoSourceCocoa.cpp: Remove unneeded include.

Source/WebKit:

* Shared/WebProcessCreationParameters.cpp:
(WebKit::WebProcessCreationParameters::encode const): Encode shouldCaptureDisplayInUIProcess.
(WebKit::WebProcessCreationParameters::decode): Decode shouldCaptureDisplayInUIProcess.
* Shared/WebProcessCreationParameters.h:

* UIProcess/API/APIProcessPoolConfiguration.cpp:
(API::ProcessPoolConfiguration::copy): Copy shouldCaptureDisplayInUIProcess.
* UIProcess/API/APIProcessPoolConfiguration.h:

* UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp:
(WebKit::UserMediaCaptureManagerProxy::SourceProxy::remoteVideoSampleAvailable):
(WebKit::UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints): Remove
RealtimeMediaSource::Type parameter, CaptureDevice has the same information. Deal with display
capture "devices".
* UIProcess/Cocoa/UserMediaCaptureManagerProxy.h:
* UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in:

* UIProcess/WebProcessPool.cpp:
(WebKit::WebProcessPool::initializeNewWebProcess): Copy shouldCaptureDisplayInUIProcess.

* WebProcess/cocoa/UserMediaCaptureManager.cpp:
(WebKit::UserMediaCaptureManager::Source::Source): Only allocate a ring buffer for Audio sources.
(WebKit::UserMediaCaptureManager::Source::~Source): Same for deallocate.
(WebKit::UserMediaCaptureManager::Source::storage): m_ringBuffer is a pointer.
(WebKit::UserMediaCaptureManager::Source::setStorage): Ditto.
(WebKit::UserMediaCaptureManager::Source::setRingBufferFrameBounds): Ditto.
(WebKit::UserMediaCaptureManager::Source::audioSamplesAvailable): Ditto.
(WebKit::UserMediaCaptureManager::Source::remoteVideoSampleAvailable): Create a
PixelBuffer-backed media sample and call videoSampleAvailable.
(WebKit::UserMediaCaptureManager::~UserMediaCaptureManager): Clear the audio and display capture
factory overrides.
(WebKit::UserMediaCaptureManager::initialize): Set the audio and display capture factory overrides.
(WebKit::UserMediaCaptureManager::createCaptureSource):
(WebKit::UserMediaCaptureManager::remoteVideoSampleAvailable):
* WebProcess/cocoa/UserMediaCaptureManager.h:
* WebProcess/cocoa/UserMediaCaptureManager.messages.in:

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@237272 268f45cc-cd09-0410-ab3c-d52691b4dbfc

28 files changed:
Source/WebCore/ChangeLog
Source/WebCore/Sources.txt
Source/WebCore/WebCore.xcodeproj/project.pbxproj
Source/WebCore/platform/MediaSample.h
Source/WebCore/platform/graphics/RemoteVideoSample.cpp [new file with mode: 0644]
Source/WebCore/platform/graphics/RemoteVideoSample.h [new file with mode: 0644]
Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h
Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.h
Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.mm
Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp
Source/WebCore/platform/mediastream/RealtimeMediaSource.h
Source/WebCore/platform/mediastream/RealtimeVideoSource.cpp
Source/WebCore/platform/mediastream/RealtimeVideoSource.h
Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
Source/WebCore/platform/mediastream/mac/DisplayCaptureSourceCocoa.cpp
Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSourceCocoa.cpp
Source/WebKit/ChangeLog
Source/WebKit/Shared/WebProcessCreationParameters.cpp
Source/WebKit/Shared/WebProcessCreationParameters.h
Source/WebKit/UIProcess/API/APIProcessPoolConfiguration.cpp
Source/WebKit/UIProcess/API/APIProcessPoolConfiguration.h
Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp
Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h
Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in
Source/WebKit/UIProcess/WebProcessPool.cpp
Source/WebKit/WebProcess/cocoa/UserMediaCaptureManager.cpp
Source/WebKit/WebProcess/cocoa/UserMediaCaptureManager.h
Source/WebKit/WebProcess/cocoa/UserMediaCaptureManager.messages.in

index 0577798..23269ce 100644 (file)
@@ -1,3 +1,52 @@
+2018-10-18  Eric Carlson  <eric.carlson@apple.com>
+
+        [MediaStream] Allow ports to optionally do screen capture in the UI process
+        https://bugs.webkit.org/show_bug.cgi?id=190728
+        <rdar://problem/45376824>
+
+        Reviewed by Jer Noble and Tim Horton.
+
+        No new tests, covered by existing tests.
+
+        * Sources.txt: Add RemoteVideoSample.cpp.
+        * WebCore.xcodeproj/project.pbxproj: Ditto.
+
+        * platform/MediaSample.h:
+        (WebCore::MediaSample::videoPixelFormat const):
+
+        * platform/graphics/RemoteVideoSample.cpp: Added.
+        (WebCore::RemoteVideoSample::~RemoteVideoSample):
+        (WebCore::RemoteVideoSample::create):
+        (WebCore::RemoteVideoSample::RemoteVideoSample):
+        (WebCore::RemoteVideoSample::surface):
+        * platform/graphics/RemoteVideoSample.h: Added.
+        (WebCore::RemoteVideoSample::time const):
+        (WebCore::RemoteVideoSample::videoFormat const):
+        (WebCore::RemoteVideoSample::size const):
+        (WebCore::RemoteVideoSample::encode const):
+        (WebCore::RemoteVideoSample::decode):
+        * platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h:
+        * platform/graphics/cv/ImageTransferSessionVT.h:
+
+        * platform/mediastream/RealtimeMediaSource.cpp:
+        (WebCore::RealtimeMediaSource::remoteVideoSampleAvailable): Call observers.
+        * platform/mediastream/RealtimeMediaSource.h:
+
+        * platform/mediastream/RealtimeVideoSource.cpp:
+        (WebCore::RealtimeVideoSource::dispatchMediaSampleToObservers): Dispatch remote samples without
+        resizing, resize local samples if necessary.
+        * platform/mediastream/RealtimeVideoSource.h:
+
+        * platform/mediastream/mac/AVVideoCaptureSource.mm:
+        (WebCore::AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection): Don't resize
+        samples, it will be done in the base class if necessary.
+
+        * platform/mediastream/mac/DisplayCaptureSourceCocoa.cpp:
+        (WebCore::DisplayCaptureSourceCocoa::emitFrame): Don't resize samples when running in the UI
+        process, it will be done in the web process.
+
+        * platform/mediastream/mac/RealtimeOutgoingVideoSourceCocoa.cpp: Remove unneeded include.
+
 2018-10-18  Jer Noble  <jer.noble@apple.com>
 
         Safari is not able to adapt between H264 streams with EditList and without EditList
index 8c1a3b7..e626ffb 100644 (file)
@@ -1668,6 +1668,7 @@ platform/graphics/PathUtilities.cpp
 platform/graphics/Pattern.cpp
 platform/graphics/PlatformTimeRanges.cpp
 platform/graphics/Region.cpp
+platform/graphics/RemoteVideoSample.cpp
 platform/graphics/RoundedRect.cpp
 platform/graphics/ShadowBlur.cpp
 platform/graphics/StringTruncator.cpp
index 223ca33..37b9887 100644 (file)
                073794FA19F5864E00E5A045 /* RTCDataChannelHandlerMock.h in Headers */ = {isa = PBXBuildFile; fileRef = 073794F419F5864E00E5A045 /* RTCDataChannelHandlerMock.h */; };
                073794FE19F5864E00E5A045 /* RTCNotifiersMock.h in Headers */ = {isa = PBXBuildFile; fileRef = 073794F819F5864E00E5A045 /* RTCNotifiersMock.h */; };
                07394ECA1BAB2CD700BE99CD /* MediaDevicesRequest.h in Headers */ = {isa = PBXBuildFile; fileRef = 07394EC91BAB2CD700BE99CD /* MediaDevicesRequest.h */; settings = {ATTRIBUTES = (Private, ); }; };
+               073A15542177A42600EA08F2 /* RemoteVideoSample.h in Headers */ = {isa = PBXBuildFile; fileRef = 073A15532177A39A00EA08F2 /* RemoteVideoSample.h */; settings = {ATTRIBUTES = (Private, ); }; };
                073B87671E4385AC0071C0EC /* AudioSampleBufferList.h in Headers */ = {isa = PBXBuildFile; fileRef = 073B87631E43859D0071C0EC /* AudioSampleBufferList.h */; };
                073B87691E4385AC0071C0EC /* AudioSampleDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = 073B87651E43859D0071C0EC /* AudioSampleDataSource.h */; };
                073BDC051F68436100EE34ED /* TrackPrivateBase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 076E11BE1F683E0D00177395 /* TrackPrivateBase.cpp */; };
                1AFE119A0CBFFCC4003017FA /* JSSQLResultSetRowList.h in Headers */ = {isa = PBXBuildFile; fileRef = 1AFE11980CBFFCC4003017FA /* JSSQLResultSetRowList.h */; };
                1AFFC4581D5E865500267A66 /* WebGLBlacklist.h in Headers */ = {isa = PBXBuildFile; fileRef = 1AFFC4511D5E7EC700267A66 /* WebGLBlacklist.h */; settings = {ATTRIBUTES = (Private, ); }; };
                1AFFC4591D5E866100267A66 /* PluginBlacklist.h in Headers */ = {isa = PBXBuildFile; fileRef = 1AFFC44F1D5E7EC700267A66 /* PluginBlacklist.h */; settings = {ATTRIBUTES = (Private, ); }; };
-               1B124D8D1D380B7000ECDFB0 /* MediaSampleAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1B124D8C1D380B7000ECDFB0 /* MediaSampleAVFObjC.h */; };
+               1B124D8D1D380B7000ECDFB0 /* MediaSampleAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1B124D8C1D380B7000ECDFB0 /* MediaSampleAVFObjC.h */; settings = {ATTRIBUTES = (Private, ); }; };
                1B124D8F1D380BB600ECDFB0 /* MediaSampleAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1B124D8E1D380BB600ECDFB0 /* MediaSampleAVFObjC.mm */; };
                1BF9DB3C1D3973AD0026AEB7 /* MediaSample.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EC7181ED60100EE4C41 /* MediaSample.h */; settings = {ATTRIBUTES = (Private, ); }; };
                1C010701192594DF008A4201 /* InlineTextBoxStyle.h in Headers */ = {isa = PBXBuildFile; fileRef = 1C0106FF192594DF008A4201 /* InlineTextBoxStyle.h */; };
                073794F819F5864E00E5A045 /* RTCNotifiersMock.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCNotifiersMock.h; sourceTree = "<group>"; };
                07394EC71BAB2CCD00BE99CD /* MediaDevicesRequest.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MediaDevicesRequest.cpp; sourceTree = "<group>"; };
                07394EC91BAB2CD700BE99CD /* MediaDevicesRequest.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaDevicesRequest.h; sourceTree = "<group>"; };
+               073A15512177A39800EA08F2 /* RemoteVideoSample.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = RemoteVideoSample.cpp; sourceTree = "<group>"; };
+               073A15532177A39A00EA08F2 /* RemoteVideoSample.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RemoteVideoSample.h; sourceTree = "<group>"; };
                073B87561E40DCE50071C0EC /* AudioStreamDescription.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioStreamDescription.h; sourceTree = "<group>"; };
                073B87571E40DCFD0071C0EC /* CAAudioStreamDescription.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CAAudioStreamDescription.cpp; sourceTree = "<group>"; };
                073B87581E40DCFD0071C0EC /* CAAudioStreamDescription.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CAAudioStreamDescription.h; sourceTree = "<group>"; };
                                E42050162141901B0066EF3B /* ProcessWarming.cpp */,
                                E42050142141901A0066EF3B /* ProcessWarming.h */,
                                CD9A87F9215D6CF3006F17B5 /* Quirks.cpp */,
+                               CD9A87F2215D43A6006F17B5 /* Quirks.cpp */,
                                CD9A87FB215D6CF3006F17B5 /* Quirks.h */,
+                               CD9A87F1215D43A6006F17B5 /* Quirks.h */,
                                46B9518D207D632A00A7D2DD /* RemoteDOMWindow.cpp */,
                                46B9518E207D632A00A7D2DD /* RemoteDOMWindow.h */,
                                46BCBBC3208500A700710638 /* RemoteDOMWindow.idl */,
                                A5071E841C56D079009951BE /* ResourceUsageThread.h */,
                                7C52229B1E1DAE47002CB8F7 /* RuntimeEnabledFeatures.cpp */,
                                7C52229C1E1DAE47002CB8F7 /* RuntimeEnabledFeatures.h */,
-                               CD9A87F2215D43A6006F17B5 /* Quirks.cpp */,
-                               CD9A87F1215D43A6006F17B5 /* Quirks.h */,
                                BCEC01BA0C274DAC009F4EC9 /* Screen.cpp */,
                                BCEC01BB0C274DAC009F4EC9 /* Screen.h */,
                                BCEC01BC0C274DAC009F4EC9 /* Screen.idl */,
                                074E82B918A69F0E007EF54C /* PlatformTimeRanges.h */,
                                BCAB417F13E356E800D8AAF3 /* Region.cpp */,
                                BCAB418013E356E800D8AAF3 /* Region.h */,
+                               073A15512177A39800EA08F2 /* RemoteVideoSample.cpp */,
+                               073A15532177A39A00EA08F2 /* RemoteVideoSample.h */,
                                A73F95FC12C97BFE0031AAF9 /* RoundedRect.cpp */,
                                A73F95FD12C97BFE0031AAF9 /* RoundedRect.h */,
                                0F3DD44D12F5EA1B000D9190 /* ShadowBlur.cpp */,
                                CD94A5C91F71CA9D00F525C5 /* CDMClient.h */,
                                CD063F801E23FA8900812BE3 /* InitDataRegistry.cpp */,
                                CD063F811E23FA8900812BE3 /* InitDataRegistry.h */,
+                               CDF7569D215C244400EFCB50 /* MediaKeyEncryptionScheme.h */,
+                               CDF7569E215C285E00EFCB50 /* MediaKeyEncryptionScheme.idl */,
                                2D9BF72F1DBFDC0F007A7D99 /* MediaKeyMessageEvent.cpp */,
                                2D9BF7301DBFDC0F007A7D99 /* MediaKeyMessageEvent.h */,
                                2D9BF6F51DBFB71F007A7D99 /* MediaKeyMessageEvent.idl */,
                                2D9BF7331DBFDC0F007A7D99 /* MediaKeys.cpp */,
                                2D9BF7341DBFDC0F007A7D99 /* MediaKeys.h */,
                                2D9BF6F61DBFB71F007A7D99 /* MediaKeys.idl */,
-                               CDF7569D215C244400EFCB50 /* MediaKeyEncryptionScheme.h */,
-                               CDF7569E215C285E00EFCB50 /* MediaKeyEncryptionScheme.idl */,
                                2D9BF7351DBFDC0F007A7D99 /* MediaKeySession.cpp */,
                                2D9BF7361DBFDC0F007A7D99 /* MediaKeySession.h */,
                                2D9BF6F71DBFB71F007A7D99 /* MediaKeySession.idl */,
                                FD23A12613F5FA5900F67001 /* JSMediaElementAudioSourceNode.h in Headers */,
                                2D9BF7121DBFD914007A7D99 /* JSMediaEncryptedEvent.h in Headers */,
                                E44614190CD6826900FADA75 /* JSMediaError.h in Headers */,
+                               CDF756A3215C29E900EFCB50 /* JSMediaKeyEncryptionScheme.h in Headers */,
                                2D9BF7281DBFDB0D007A7D99 /* JSMediaKeyMessageEvent.h in Headers */,
                                2D9BF7291DBFDB10007A7D99 /* JSMediaKeys.h in Headers */,
                                2D9BF72A1DBFDB13007A7D99 /* JSMediaKeySession.h in Headers */,
                                51058AE01D67C229009A538C /* MockGamepad.h in Headers */,
                                51058AE21D67C229009A538C /* MockGamepadProvider.h in Headers */,
                                413CCD4A20DE034F0065A21A /* MockMediaDevice.h in Headers */,
-                               CDF756A3215C29E900EFCB50 /* JSMediaKeyEncryptionScheme.h in Headers */,
                                CDF2B0131820540600F2B424 /* MockMediaPlayerMediaSource.h in Headers */,
                                CDF2B0151820540600F2B424 /* MockMediaSourcePrivate.h in Headers */,
                                A1BB85A92159B02C0067E07D /* MockPaymentError.h in Headers */,
                                CD8ACA891D237AA200ECC59E /* RemoteCommandListenerMac.h in Headers */,
                                46B95199207D634D00A7D2DD /* RemoteDOMWindow.h in Headers */,
                                46B9519A207D635400A7D2DD /* RemoteFrame.h in Headers */,
+                               073A15542177A42600EA08F2 /* RemoteVideoSample.h in Headers */,
                                D06C0D8F0CFD11460065F43F /* RemoveFormatCommand.h in Headers */,
                                93309E05099E64920056E581 /* RemoveNodeCommand.h in Headers */,
                                93309E07099E64920056E581 /* RemoveNodePreservingChildrenCommand.h in Headers */,
index 050d938..7516656 100644 (file)
@@ -93,6 +93,7 @@ public:
     };
     virtual VideoRotation videoRotation() const { return VideoRotation::None; }
     virtual bool videoMirrored() const { return false; }
+    virtual uint32_t videoPixelFormat() const { return 0; }
 
     bool isSync() const { return flags() & IsSync; }
     bool isNonDisplaying() const { return flags() & IsNonDisplaying; }
diff --git a/Source/WebCore/platform/graphics/RemoteVideoSample.cpp b/Source/WebCore/platform/graphics/RemoteVideoSample.cpp
new file mode 100644 (file)
index 0000000..0dbdca9
--- /dev/null
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "RemoteVideoSample.h"
+
+#include "GraphicsContextCG.h"
+#include "IOSurface.h"
+#include "Logging.h"
+#include "RealtimeVideoUtilities.h"
+
+#if HAVE(IOSURFACE)
+#include "MediaSampleAVFObjc.h"
+#endif
+
+#import <pal/cf/CoreMediaSoftLink.h>
+
+namespace WebCore {
+using namespace PAL;
+
+#if HAVE(IOSURFACE)
+std::unique_ptr<RemoteVideoSample> RemoteVideoSample::create(MediaSample&& sample)
+{
+    ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
+
+    auto imageBuffer = CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer);
+    if (!imageBuffer)
+        return nullptr;
+
+    auto surface = CVPixelBufferGetIOSurface(imageBuffer);
+    if (!surface) {
+        RELEASE_LOG(Media, "RemoteVideoSample::create: CVPixelBufferGetIOSurface returned nullptr");
+        return nullptr;
+    }
+
+    return std::unique_ptr<RemoteVideoSample>(new RemoteVideoSample(surface, sRGBColorSpaceRef(), sample.presentationTime(), sample.videoRotation(), sample.videoMirrored()));
+}
+
+RemoteVideoSample::RemoteVideoSample(IOSurfaceRef surface, CGColorSpaceRef colorSpace, MediaTime&& time, MediaSample::VideoRotation rotation, bool mirrored)
+    : m_ioSurface(WebCore::IOSurface::createFromSurface(surface, colorSpace))
+    , m_rotation(rotation)
+    , m_time(WTFMove(time))
+    , m_videoFormat(IOSurfaceGetPixelFormat(surface))
+    , m_size(IntSize(IOSurfaceGetWidth(surface), IOSurfaceGetHeight(surface)))
+    , m_mirrored(mirrored)
+{
+}
+
+IOSurfaceRef RemoteVideoSample::surface()
+{
+    if (!m_ioSurface && m_sendRight)
+        m_ioSurface = WebCore::IOSurface::createFromSendRight(WTFMove(m_sendRight), sRGBColorSpaceRef());
+
+    return m_ioSurface ? m_ioSurface->surface() : nullptr;
+}
+#endif
+
+}
diff --git a/Source/WebCore/platform/graphics/RemoteVideoSample.h b/Source/WebCore/platform/graphics/RemoteVideoSample.h
new file mode 100644 (file)
index 0000000..46bcead
--- /dev/null
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "MediaSample.h"
+#include "RemoteVideoSample.h"
+#include <wtf/MachSendRight.h>
+#include <wtf/MediaTime.h>
+
+#if HAVE(IOSURFACE)
+#include "IOSurface.h"
+#endif
+
+namespace WebCore {
+
+class RemoteVideoSample {
+public:
+    RemoteVideoSample() = default;
+    ~RemoteVideoSample() = default;
+
+#if HAVE(IOSURFACE)
+    WEBCORE_EXPORT static std::unique_ptr<RemoteVideoSample> create(MediaSample&&);
+    WEBCORE_EXPORT IOSurfaceRef surface();
+#endif
+
+    const MediaTime& time() const { return m_time; }
+    uint32_t videoFormat() const { return m_videoFormat; }
+    IntSize size() const { return m_size; }
+
+    template<class Encoder> void encode(Encoder& encoder) const
+    {
+#if HAVE(IOSURFACE)
+        if (m_ioSurface)
+            encoder << m_ioSurface->createSendRight();
+        else
+            encoder << WTF::MachSendRight();
+#endif
+        encoder << m_rotation;
+        encoder << m_time;
+        encoder << m_videoFormat;
+        encoder << m_size;
+        encoder << m_mirrored;
+    }
+
+    template<class Decoder> static bool decode(Decoder& decoder, RemoteVideoSample& sample)
+    {
+#if HAVE(IOSURFACE)
+        MachSendRight sendRight;
+        if (!decoder.decode(sendRight))
+            return false;
+        sample.m_sendRight = WTFMove(sendRight);
+#endif
+        MediaSample::VideoRotation rotation;
+        if (!decoder.decode(rotation))
+            return false;
+        sample.m_rotation = rotation;
+
+        MediaTime time;
+        if (!decoder.decode(time))
+            return false;
+        sample.m_time = WTFMove(time);
+
+        uint32_t format;
+        if (!decoder.decode(format))
+            return false;
+        sample.m_videoFormat = format;
+
+        IntSize size;
+        if (!decoder.decode(size))
+            return false;
+        sample.m_size = WTFMove(size);
+
+        bool mirrored;
+        if (!decoder.decode(mirrored))
+            return false;
+        sample.m_mirrored = mirrored;
+
+        return true;
+    }
+
+private:
+
+#if HAVE(IOSURFACE)
+    RemoteVideoSample(IOSurfaceRef, CGColorSpaceRef, MediaTime&&, MediaSample::VideoRotation, bool);
+
+    std::unique_ptr<WebCore::IOSurface> m_ioSurface;
+    WTF::MachSendRight m_sendRight;
+#endif
+    MediaSample::VideoRotation m_rotation { MediaSample::VideoRotation::None };
+    MediaTime m_time;
+    uint32_t m_videoFormat { 0 };
+    IntSize m_size;
+    bool m_mirrored { false };
+};
+
+}
+
index 0923883..60ee96f 100644 (file)
@@ -64,7 +64,7 @@ public:
 
     VideoRotation videoRotation() const override { return m_rotation; }
     bool videoMirrored() const override { return m_mirrored; }
-    uint32_t videoPixelFormat() const;
+    uint32_t videoPixelFormat() const final;
 
     CMSampleBufferRef sampleBuffer() const { return m_sample.get(); }
 
index 5c32314..5672b85 100644 (file)
@@ -52,7 +52,7 @@ public:
     RefPtr<MediaSample> createMediaSample(CMSampleBufferRef, const IntSize&, MediaSample::VideoRotation = MediaSample::VideoRotation::None, bool mirrored = false);
 
 #if HAVE(IOSURFACE) && !PLATFORM(IOSMAC)
-    RefPtr<MediaSample> createMediaSample(IOSurfaceRef, const MediaTime&, const IntSize&, MediaSample::VideoRotation = MediaSample::VideoRotation::None, bool mirrored = false);
+    WEBCORE_EXPORT RefPtr<MediaSample> createMediaSample(IOSurfaceRef, const MediaTime&, const IntSize&, MediaSample::VideoRotation = MediaSample::VideoRotation::None, bool mirrored = false);
 #endif
 
 private:
index 94b19cd..dbd58b3 100644 (file)
@@ -148,7 +148,7 @@ RetainPtr<CMSampleBufferRef> ImageTransferSessionVT::convertCMSampleBuffer(CMSam
     auto description = CMSampleBufferGetFormatDescription(sourceBuffer);
     auto sourceSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(description, true, true));
     auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sourceBuffer));
-    if (size == roundedIntSize(sourceSize) && m_pixelFormat == CVPixelBufferGetPixelFormatType(pixelBuffer))
+    if (size == expandedIntSize(sourceSize) && m_pixelFormat == CVPixelBufferGetPixelFormatType(pixelBuffer))
         return retainPtr(sourceBuffer);
 
     if (!setSize(size))
@@ -340,7 +340,7 @@ RefPtr<MediaSample> ImageTransferSessionVT::convertMediaSample(MediaSample& samp
 {
     ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
 
-    if (size == roundedIntSize(sample.presentationSize()))
+    if (size == expandedIntSize(sample.presentationSize()))
         return &sample;
 
     auto resizedBuffer = convertCMSampleBuffer(sample.platformSample().sample.cmSampleBuffer, size);
index ec8b5b9..f2dd46c 100644 (file)
@@ -158,6 +158,13 @@ void RealtimeMediaSource::videoSampleAvailable(MediaSample& mediaSample)
     });
 }
 
+void RealtimeMediaSource::remoteVideoSampleAvailable(RemoteVideoSample&& sample)
+{
+    forEachObserver([&](auto& observer) {
+        observer.remoteVideoSampleAvailable(sample);
+    });
+}
+
 void RealtimeMediaSource::audioSamplesAvailable(const MediaTime& time, const PlatformAudioData& audioData, const AudioStreamDescription& description, size_t numberOfFrames)
 {
     forEachObserver([&](auto& observer) {
index a93de33..7aa860c 100644 (file)
@@ -61,6 +61,7 @@ class MediaStreamPrivate;
 class OrientationNotifier;
 class PlatformAudioData;
 class RealtimeMediaSourceSettings;
+class RemoteVideoSample;
 
 struct CaptureSourceOrError;
 
@@ -81,6 +82,7 @@ public:
 
         // Called on the main thread.
         virtual void videoSampleAvailable(MediaSample&) { }
+        virtual void remoteVideoSampleAvailable(RemoteVideoSample&) { }
 
         // May be called on a background thread.
         virtual void audioSamplesAvailable(const MediaTime&, const PlatformAudioData&, const AudioStreamDescription&, size_t /*numberOfFrames*/) { }
@@ -165,6 +167,9 @@ public:
     virtual bool isIncomingAudioSource() const { return false; }
     virtual bool isIncomingVideoSource() const { return false; }
 
+    void setIsRemote(bool isRemote) { m_isRemote = isRemote; }
+    bool isRemote() const { return m_isRemote; }
+
     // Testing only
     virtual void delaySamples(Seconds) { };
 
@@ -196,13 +201,13 @@ protected:
 
     void videoSampleAvailable(MediaSample&);
     void audioSamplesAvailable(const MediaTime&, const PlatformAudioData&, const AudioStreamDescription&, size_t);
+    void remoteVideoSampleAvailable(RemoteVideoSample&&);
 
 private:
     virtual void startProducingData() { }
     virtual void stopProducingData() { }
     virtual void settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>) { }
 
-
     void forEachObserver(const WTF::Function<void(Observer&)>&) const;
 
     bool m_muted { false };
@@ -228,6 +233,7 @@ private:
     bool m_isProducingData { false };
     bool m_interrupted { false };
     bool m_captureDidFailed { false };
+    bool m_isRemote { false };
 };
 
 struct CaptureSourceOrError {
index 149133d..0b4a6ed 100644 (file)
 #include "Logging.h"
 #include "RealtimeMediaSourceCenter.h"
 #include "RealtimeMediaSourceSettings.h"
+#include "RemoteVideoSample.h"
+
+#if PLATFORM(COCOA)
+#include "ImageTransferSessionVT.h"
+#endif
 
 namespace WebCore {
 
@@ -378,7 +383,36 @@ void RealtimeVideoSource::dispatchMediaSampleToObservers(MediaSample& sample)
     if (interval > 1)
         m_observedFrameRate = (m_observedFrameTimeStamps.size() / interval);
 
-    videoSampleAvailable(sample);
+    if (isRemote()) {
+#if HAVE(IOSURFACE)
+        auto remoteSample = RemoteVideoSample::create(WTFMove(sample));
+        if (remoteSample)
+            remoteVideoSampleAvailable(WTFMove(*remoteSample));
+#else
+        ASSERT_NOT_REACHED();
+#endif
+        return;
+    }
+
+    auto mediaSample = makeRefPtr(&sample);
+#if PLATFORM(COCOA)
+    auto size = this->size();
+    if (!size.isEmpty() && size != expandedIntSize(sample.presentationSize())) {
+
+        if (!m_imageTransferSession)
+            m_imageTransferSession = ImageTransferSessionVT::create(sample.videoPixelFormat());
+
+        if (m_imageTransferSession) {
+            mediaSample = m_imageTransferSession->convertMediaSample(sample, size);
+            if (!mediaSample) {
+                ASSERT_NOT_REACHED();
+                return;
+            }
+        }
+    }
+#endif
+
+    videoSampleAvailable(mediaSample.releaseNonNull());
 }
 
 } // namespace WebCore
index 47c4f5e..84268ab 100644 (file)
@@ -37,6 +37,8 @@
 
 namespace WebCore {
 
+class ImageTransferSessionVT;
+
 class RealtimeVideoSource : public RealtimeMediaSource {
 public:
     virtual ~RealtimeVideoSource();
@@ -83,6 +85,9 @@ private:
     Deque<double> m_observedFrameTimeStamps;
     double m_observedFrameRate { 0 };
     IntSize m_defaultSize;
+#if PLATFORM(COCOA)
+    std::unique_ptr<ImageTransferSessionVT> m_imageTransferSession;
+#endif
 };
 
 } // namespace WebCore
index ec43ae0..83afed7 100644 (file)
@@ -540,27 +540,9 @@ void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
 
 void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
 {
-    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
-    if (!formatDescription)
-        return;
-
-    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
-    RefPtr<MediaSample> sample;
-    if (dimensions.width != m_requestedSize.width() || dimensions.height != m_requestedSize.height()) {
-
-        if (!m_imageTransferSession)
-            m_imageTransferSession = ImageTransferSessionVT::create(avVideoCapturePixelBufferFormat());
-
-        sample = m_imageTransferSession->createMediaSample(sampleBuffer, m_requestedSize, m_sampleRotation, [captureConnection isVideoMirrored]);
-        if (!sample) {
-            ASSERT_NOT_REACHED();
-            return;
-        }
-    } else
-        sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
-
+    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
     scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
-        processNewFrame(sample.releaseNonNull());
+        processNewFrame(WTFMove(sample));
     });
 }
 
index 343574e..3372fa2 100644 (file)
@@ -35,6 +35,7 @@
 #include "RealtimeMediaSourceCenter.h"
 #include "RealtimeMediaSourceSettings.h"
 #include "RealtimeVideoUtilities.h"
+#include "RemoteVideoSample.h"
 #include "Timer.h"
 #include <CoreMedia/CMSync.h>
 #include <mach/mach_time.h>
@@ -172,22 +173,44 @@ void DisplayCaptureSourceCocoa::emitFrame()
         m_imageTransferSession = ImageTransferSessionVT::create(preferedPixelBufferFormat());
 
     auto sampleTime = MediaTime::createWithDouble((elapsedTime() + 100_ms).seconds());
+
     auto frame = generateFrame();
-    IntSize imageSize;
+    auto imageSize = WTF::switchOn(frame,
+        [](RetainPtr<IOSurfaceRef> surface) -> IntSize {
+            if (!surface)
+                return { };
+
+            return IntSize(IOSurfaceGetWidth(surface.get()), IOSurfaceGetHeight(surface.get()));
+        },
+        [](RetainPtr<CGImageRef> image) -> IntSize {
+            if (!image)
+                return { };
+
+            return IntSize(CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
+        }
+    );
+
+    ASSERT(!imageSize.isEmpty());
+    if (imageSize.isEmpty())
+        return;
+
+    if (m_intrinsicSize != imageSize)
+        setIntrinsicSize(imageSize);
+
+    auto mediaSampleSize = isRemote() ? imageSize : frameSize();
+
     RefPtr<MediaSample> sample = WTF::switchOn(frame,
-        [this, sampleTime, &imageSize](RetainPtr<IOSurfaceRef> surface) -> RefPtr<MediaSample> {
+        [this, sampleTime, mediaSampleSize](RetainPtr<IOSurfaceRef> surface) -> RefPtr<MediaSample> {
             if (!surface)
                 return nullptr;
 
-            imageSize = IntSize(IOSurfaceGetWidth(surface.get()), IOSurfaceGetHeight(surface.get()));
-            return m_imageTransferSession->createMediaSample(surface.get(), sampleTime, imageSize);
+            return m_imageTransferSession->createMediaSample(surface.get(), sampleTime, mediaSampleSize);
         },
-        [this, sampleTime, &imageSize](RetainPtr<CGImageRef> image) -> RefPtr<MediaSample> {
+        [this, sampleTime, mediaSampleSize](RetainPtr<CGImageRef> image) -> RefPtr<MediaSample> {
             if (!image)
                 return nullptr;
 
-            imageSize = IntSize(CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
-            return m_imageTransferSession->createMediaSample(image.get(), sampleTime, imageSize);
+            return m_imageTransferSession->createMediaSample(image.get(), sampleTime, mediaSampleSize);
         }
     );
 
@@ -196,8 +219,13 @@ void DisplayCaptureSourceCocoa::emitFrame()
         return;
     }
 
-    if (m_intrinsicSize != imageSize)
-        setIntrinsicSize(imageSize);
+    if (isRemote()) {
+        auto remoteSample = RemoteVideoSample::create(WTFMove(*sample));
+        if (remoteSample)
+            remoteVideoSampleAvailable(WTFMove(*remoteSample));
+
+        return;
+    }
 
     videoSampleAvailable(*sample.get());
 #endif
index 0ae9989..0e879ff 100644 (file)
@@ -1,3 +1,48 @@
+2018-10-18  Eric Carlson  <eric.carlson@apple.com>
+
+        [MediaStream] Allow ports to optionally do screen capture in the UI process
+        https://bugs.webkit.org/show_bug.cgi?id=190728
+        <rdar://problem/45376824>
+
+        Reviewed by Jer Noble and Tim Horton.
+
+        * Shared/WebProcessCreationParameters.cpp:
+        (WebKit::WebProcessCreationParameters::encode const): Encode shouldCaptureDisplayInUIProcess.
+        (WebKit::WebProcessCreationParameters::decode): Decode shouldCaptureDisplayInUIProcess.
+        * Shared/WebProcessCreationParameters.h:
+
+        * UIProcess/API/APIProcessPoolConfiguration.cpp:
+        (API::ProcessPoolConfiguration::copy): Copy shouldCaptureDisplayInUIProcess.
+        * UIProcess/API/APIProcessPoolConfiguration.h:
+
+        * UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp:
+        (WebKit::UserMediaCaptureManagerProxy::SourceProxy::remoteVideoSampleAvailable):
+        (WebKit::UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints): Remove
+        RealtimeMediaSource::Type parameter, CaptureDevice has the same information. Deal with display
+        capture "devices".
+        * UIProcess/Cocoa/UserMediaCaptureManagerProxy.h:
+        * UIProcess/Cocoa/UserMediaCaptureManagerProxy.messages.in:
+
+        * UIProcess/WebProcessPool.cpp:
+        (WebKit::WebProcessPool::initializeNewWebProcess): Copy shouldCaptureDisplayInUIProcess.
+
+        * WebProcess/cocoa/UserMediaCaptureManager.cpp:
+        (WebKit::UserMediaCaptureManager::Source::Source): Only allocate a ring buffer for Audio sources.
+        (WebKit::UserMediaCaptureManager::Source::~Source): Same for deallocate.
+        (WebKit::UserMediaCaptureManager::Source::storage): m_ringBuffer is a pointer.
+        (WebKit::UserMediaCaptureManager::Source::setStorage): Ditto.
+        (WebKit::UserMediaCaptureManager::Source::setRingBufferFrameBounds): Ditto.
+        (WebKit::UserMediaCaptureManager::Source::audioSamplesAvailable): Ditto.
+        (WebKit::UserMediaCaptureManager::Source::remoteVideoSampleAvailable): Create a 
+        PixelBuffer-backed media sample and call videoSampleAvailable.
+        (WebKit::UserMediaCaptureManager::~UserMediaCaptureManager): Clear the audio and display capture
+        factory overrides.
+        (WebKit::UserMediaCaptureManager::initialize): Set the audio and display capture factory overrides.
+        (WebKit::UserMediaCaptureManager::createCaptureSource):
+        (WebKit::UserMediaCaptureManager::remoteVideoSampleAvailable):
+        * WebProcess/cocoa/UserMediaCaptureManager.h:
+        * WebProcess/cocoa/UserMediaCaptureManager.messages.in:
+
 2018-10-18  Chris Dumez  <cdumez@apple.com>
 
         [PSON] SuspendedPages do not report meaningful domains in Activity Monitor
index 1766bf0..47e88f4 100644 (file)
@@ -72,6 +72,7 @@ void WebProcessCreationParameters::encode(IPC::Encoder& encoder) const
 #if ENABLE(MEDIA_STREAM)
     encoder << audioCaptureExtensionHandle;
     encoder << shouldCaptureAudioInUIProcess;
+    encoder << shouldCaptureDisplayInUIProcess;
 #endif
     encoder << shouldUseTestingNetworkSession;
     encoder << urlSchemesRegisteredAsEmptyDocument;
@@ -264,6 +265,8 @@ bool WebProcessCreationParameters::decode(IPC::Decoder& decoder, WebProcessCreat
 
     if (!decoder.decode(parameters.shouldCaptureAudioInUIProcess))
         return false;
+    if (!decoder.decode(parameters.shouldCaptureDisplayInUIProcess))
+        return false;
 #endif
     if (!decoder.decode(parameters.shouldUseTestingNetworkSession))
         return false;
index 31db8b0..f7cd3cb 100644 (file)
@@ -96,6 +96,7 @@ struct WebProcessCreationParameters {
 #if ENABLE(MEDIA_STREAM)
     SandboxExtension::Handle audioCaptureExtensionHandle;
     bool shouldCaptureAudioInUIProcess { false };
+    bool shouldCaptureDisplayInUIProcess { false };
 #endif
     String mediaKeyStorageDirectory;
 
index 996c0dd..9d2e018 100644 (file)
@@ -115,6 +115,7 @@ Ref<ProcessPoolConfiguration> ProcessPoolConfiguration::copy()
     copy->m_alwaysRunsAtBackgroundPriority = this->m_alwaysRunsAtBackgroundPriority;
     copy->m_shouldTakeUIBackgroundAssertion = this->m_shouldTakeUIBackgroundAssertion;
     copy->m_shouldCaptureAudioInUIProcess = this->m_shouldCaptureAudioInUIProcess;
+    copy->m_shouldCaptureDisplayInUIProcess = this->m_shouldCaptureDisplayInUIProcess;
 #if PLATFORM(IOS_FAMILY)
     copy->m_ctDataConnectionServiceType = this->m_ctDataConnectionServiceType;
 #endif
index 4d38b14..e22ffb4 100644 (file)
 
 namespace API {
 
+#if PLATFORM(COCOA) && !PLATFORM(IOS_FAMILY_SIMULATOR)
+#define DEFAULT_CAPTURE_DISPLAY_IN_UI_PROCESS true
+#else
+#define DEFAULT_CAPTURE_DISPLAY_IN_UI_PROCESS false
+#endif
+
 class ProcessPoolConfiguration final : public ObjectImpl<Object::Type::ProcessPoolConfiguration> {
 public:
     static Ref<ProcessPoolConfiguration> create();
@@ -133,6 +139,9 @@ public:
     bool shouldCaptureAudioInUIProcess() const { return m_shouldCaptureAudioInUIProcess; }
     void setShouldCaptureAudioInUIProcess(bool shouldCaptureAudioInUIProcess) { m_shouldCaptureAudioInUIProcess = shouldCaptureAudioInUIProcess; }
 
+    bool shouldCaptureDisplayInUIProcess() const { return m_shouldCaptureDisplayInUIProcess; }
+    void setShouldCaptureDisplayInUIProcess(bool shouldCaptureDisplayInUIProcess) { m_shouldCaptureDisplayInUIProcess = shouldCaptureDisplayInUIProcess; }
+
 #if PLATFORM(IOS_FAMILY)
     const WTF::String& ctDataConnectionServiceType() const { return m_ctDataConnectionServiceType; }
     void setCTDataConnectionServiceType(const WTF::String& ctDataConnectionServiceType) { m_ctDataConnectionServiceType = ctDataConnectionServiceType; }
@@ -194,6 +203,7 @@ private:
     bool m_alwaysRunsAtBackgroundPriority { false };
     bool m_shouldTakeUIBackgroundAssertion { true };
     bool m_shouldCaptureAudioInUIProcess { false };
+    bool m_shouldCaptureDisplayInUIProcess { DEFAULT_CAPTURE_DISPLAY_IN_UI_PROCESS };
     ProcessID m_presentingApplicationPID { getCurrentProcessID() };
     bool m_processSwapsOnNavigation { false };
     bool m_alwaysKeepAndReuseSwappedProcesses { false };
index 38aab26..af83b63 100644 (file)
@@ -35,6 +35,7 @@
 #include <WebCore/CARingBuffer.h>
 #include <WebCore/MediaConstraints.h>
 #include <WebCore/RealtimeMediaSourceCenter.h>
+#include <WebCore/RemoteVideoSample.h>
 #include <WebCore/WebAudioBufferList.h>
 #include <wtf/UniqueRef.h>
 
@@ -98,6 +99,11 @@ public:
         m_manager.process().send(Messages::UserMediaCaptureManager::AudioSamplesAvailable(m_id, time, numberOfFrames, startFrame, endFrame), 0);
     }
 
+    virtual void remoteVideoSampleAvailable(RemoteVideoSample& sample)
+    {
+        m_manager.process().send(Messages::UserMediaCaptureManager::RemoteVideoSampleAvailable(m_id, WTFMove(sample)), 0);
+    }
+
     void storageChanged(SharedMemory* storage) final {
         SharedMemory::Handle handle;
         if (storage)
@@ -125,17 +131,23 @@ UserMediaCaptureManagerProxy::~UserMediaCaptureManagerProxy()
     m_process.removeMessageReceiver(Messages::UserMediaCaptureManagerProxy::messageReceiverName());
 }
 
-void UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints(uint64_t id, const CaptureDevice& device, WebCore::RealtimeMediaSource::Type type, String&& hashSalt, const MediaConstraints& constraints, bool& succeeded, String& invalidConstraints, WebCore::RealtimeMediaSourceSettings& settings)
+void UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints(uint64_t id, const CaptureDevice& device, String&& hashSalt, const MediaConstraints& constraints, bool& succeeded, String& invalidConstraints, WebCore::RealtimeMediaSourceSettings& settings)
 {
     CaptureSourceOrError sourceOrError;
-    switch (type) {
-    case WebCore::RealtimeMediaSource::Type::Audio:
-        sourceOrError = RealtimeMediaSourceCenter::singleton().audioFactory().createAudioCaptureSource(device, WTFMove(hashSalt), &constraints);
+    switch (device.type()) {
+    case WebCore::CaptureDevice::DeviceType::Microphone:
+        sourceOrError = RealtimeMediaSourceCenter::audioFactory().createAudioCaptureSource(device, WTFMove(hashSalt), &constraints);
+        break;
+    case WebCore::CaptureDevice::DeviceType::Camera:
+        sourceOrError = RealtimeMediaSourceCenter::videoFactory().createVideoCaptureSource(device, WTFMove(hashSalt), &constraints);
         break;
-    case WebCore::RealtimeMediaSource::Type::Video:
-        sourceOrError = RealtimeMediaSourceCenter::singleton().videoFactory().createVideoCaptureSource(device, WTFMove(hashSalt), &constraints);
+    case WebCore::CaptureDevice::DeviceType::Screen:
+    case WebCore::CaptureDevice::DeviceType::Window:
+    case WebCore::CaptureDevice::DeviceType::Application:
+    case WebCore::CaptureDevice::DeviceType::Browser:
+        sourceOrError = RealtimeMediaSourceCenter::displayCaptureFactory().createDisplayCaptureSource(device, &constraints);
         break;
-    case WebCore::RealtimeMediaSource::Type::None:
+    case WebCore::CaptureDevice::DeviceType::Unknown:
         ASSERT_NOT_REACHED();
         break;
     }
@@ -143,6 +155,7 @@ void UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstrai
     succeeded = !!sourceOrError;
     if (sourceOrError) {
         auto source = sourceOrError.source();
+        source->setIsRemote(true);
         settings = source->settings();
         m_proxies.set(id, std::make_unique<SourceProxy>(id, *this, WTFMove(source)));
     } else
index ecaeae2..3bf921f 100644 (file)
@@ -29,6 +29,7 @@
 
 #include "Connection.h"
 #include "MessageReceiver.h"
+#include "UserMediaCaptureManager.h"
 #include "UserMediaCaptureManagerProxyMessages.h"
 #include <WebCore/RealtimeMediaSource.h>
 
@@ -49,7 +50,7 @@ private:
     void didReceiveMessage(IPC::Connection&, IPC::Decoder&) final;
     void didReceiveSyncMessage(IPC::Connection&, IPC::Decoder&, std::unique_ptr<IPC::Encoder>&) final;
 
-    void createMediaSourceForCaptureDeviceWithConstraints(uint64_t id, const WebCore::CaptureDevice& deviceID, WebCore::RealtimeMediaSource::Type, String&&, const WebCore::MediaConstraints&, bool& succeeded, String& invalidConstraints, WebCore::RealtimeMediaSourceSettings&);
+    void createMediaSourceForCaptureDeviceWithConstraints(uint64_t id, const WebCore::CaptureDevice& deviceID, String&&, const WebCore::MediaConstraints&, bool& succeeded, String& invalidConstraints, WebCore::RealtimeMediaSourceSettings&);
     void startProducingData(uint64_t);
     void stopProducingData(uint64_t);
     void capabilities(uint64_t, WebCore::RealtimeMediaSourceCapabilities&);
index bd219bc..45b1c38 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 Apple Inc. All rights reserved.
+# Copyright (C) 2017-2018 Apple Inc. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -24,7 +24,7 @@
 #if ENABLE(MEDIA_STREAM)
 
 messages -> UserMediaCaptureManagerProxy {
-    CreateMediaSourceForCaptureDeviceWithConstraints(uint64_t id, WebCore::CaptureDevice device, WebCore::RealtimeMediaSource::Type type, String hashSalt, struct WebCore::MediaConstraints constraints) -> (bool success, String invalidConstraints, WebCore::RealtimeMediaSourceSettings settings)
+    CreateMediaSourceForCaptureDeviceWithConstraints(uint64_t id, WebCore::CaptureDevice device, String hashSalt, struct WebCore::MediaConstraints constraints) -> (bool success, String invalidConstraints, WebCore::RealtimeMediaSourceSettings settings)
     StartProducingData(uint64_t id)
     StopProducingData(uint64_t id)
     Capabilities(uint64_t id) -> (WebCore::RealtimeMediaSourceCapabilities capabilities)
index 85ffb9a..731ef4b 100644 (file)
@@ -907,6 +907,7 @@ void WebProcessPool::initializeNewWebProcess(WebProcessProxy& process, WebsiteDa
     parameters.resourceLoadStatisticsEnabled = websiteDataStore.resourceLoadStatisticsEnabled();
 #if ENABLE(MEDIA_STREAM)
     parameters.shouldCaptureAudioInUIProcess = m_configuration->shouldCaptureAudioInUIProcess();
+    parameters.shouldCaptureDisplayInUIProcess = m_configuration->shouldCaptureDisplayInUIProcess();
 #endif
 
     parameters.presentingApplicationPID = m_configuration->presentingApplicationPID();
index cb6d6c7..7236a5b 100644 (file)
 #include "WebProcess.h"
 #include "WebProcessCreationParameters.h"
 #include <WebCore/CaptureDevice.h>
+#include <WebCore/ImageTransferSessionVT.h>
 #include <WebCore/MediaConstraints.h>
 #include <WebCore/RealtimeMediaSourceCenter.h>
+#include <WebCore/RemoteVideoSample.h>
 #include <WebCore/WebAudioBufferList.h>
 #include <WebCore/WebAudioSourceProviderAVFObjC.h>
+#include <wtf/Assertions.h>
 
 namespace WebKit {
+using namespace PAL;
 using namespace WebCore;
 
 static uint64_t nextSessionID()
@@ -55,16 +59,22 @@ public:
         : RealtimeMediaSource(type, WTFMove(name), WTFMove(sourceID), WTFMove(hashSalt))
         , m_id(id)
         , m_manager(manager)
-        , m_ringBuffer(makeUniqueRef<SharedRingBufferStorage>(nullptr))
     {
+        if (type == Type::Audio)
+            m_ringBuffer = std::make_unique<CARingBuffer>(makeUniqueRef<SharedRingBufferStorage>(nullptr));
     }
 
     ~Source()
     {
-        storage().invalidate();
+        if (type() == Type::Audio)
+            storage().invalidate();
     }
 
-    SharedRingBufferStorage& storage() { return static_cast<SharedRingBufferStorage&>(m_ringBuffer.storage()); }
+    SharedRingBufferStorage& storage()
+    {
+        ASSERT(type() == Type::Audio);
+        return static_cast<SharedRingBufferStorage&>(m_ringBuffer->storage());
+    }
 
     const RealtimeMediaSourceCapabilities& capabilities() final
     {
@@ -84,10 +94,11 @@ public:
     const CAAudioStreamDescription& description() const { return m_description; }
     void setStorage(const SharedMemory::Handle& handle, const WebCore::CAAudioStreamDescription& description, uint64_t numberOfFrames)
     {
+        ASSERT(type() == Type::Audio);
         m_description = description;
 
         if (handle.isNull()) {
-            m_ringBuffer.deallocate();
+            m_ringBuffer->deallocate();
             storage().setReadOnly(false);
             storage().setStorage(nullptr);
             return;
@@ -97,22 +108,51 @@ public:
         storage().setStorage(WTFMove(memory));
         storage().setReadOnly(true);
 
-        m_ringBuffer.allocate(description, numberOfFrames);
+        m_ringBuffer->allocate(description, numberOfFrames);
     }
 
     void setRingBufferFrameBounds(uint64_t startFrame, uint64_t endFrame)
     {
-        m_ringBuffer.setCurrentFrameBounds(startFrame, endFrame);
+        ASSERT(type() == Type::Audio);
+        m_ringBuffer->setCurrentFrameBounds(startFrame, endFrame);
     }
 
     void audioSamplesAvailable(MediaTime time, uint64_t numberOfFrames)
     {
+        ASSERT(type() == Type::Audio);
         WebAudioBufferList audioData(m_description, numberOfFrames);
-        m_ringBuffer.fetch(audioData.list(), numberOfFrames, time.timeValue());
+        m_ringBuffer->fetch(audioData.list(), numberOfFrames, time.timeValue());
 
         RealtimeMediaSource::audioSamplesAvailable(time, audioData, m_description, numberOfFrames);
     }
 
+#if HAVE(IOSURFACE)
+    void remoteVideoSampleAvailable(RemoteVideoSample&& remoteSample)
+    {
+        ASSERT(type() == Type::Video);
+
+        auto videoSampleSize = IntSize(m_settings.width(), m_settings.height());
+        if (videoSampleSize.isEmpty())
+            videoSampleSize = remoteSample.size();
+
+        if (!m_imageTransferSession)
+            m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
+
+        if (!m_imageTransferSession) {
+            ASSERT_NOT_REACHED();
+            return;
+        }
+
+        auto sampleRef = m_imageTransferSession->createMediaSample(remoteSample.surface(), remoteSample.time(), videoSampleSize);
+        if (!sampleRef) {
+            ASSERT_NOT_REACHED();
+            return;
+        }
+
+        RealtimeMediaSource::videoSampleAvailable(*sampleRef);
+    }
+#endif
+
     void applyConstraintsSucceeded(const WebCore::RealtimeMediaSourceSettings& settings)
     {
         auto callbacks = m_pendingApplyConstraintsCallbacks.takeFirst();
@@ -144,8 +184,11 @@ private:
     UserMediaCaptureManager& m_manager;
     mutable std::optional<RealtimeMediaSourceCapabilities> m_capabilities;
     RealtimeMediaSourceSettings m_settings;
+
     CAAudioStreamDescription m_description;
-    CARingBuffer m_ringBuffer;
+    std::unique_ptr<CARingBuffer> m_ringBuffer;
+
+    std::unique_ptr<ImageTransferSessionVT> m_imageTransferSession;
 
     struct ApplyConstraintsCallback {
         SuccessHandler successHandler;
@@ -162,7 +205,8 @@ UserMediaCaptureManager::UserMediaCaptureManager(WebProcess& process)
 
 UserMediaCaptureManager::~UserMediaCaptureManager()
 {
-    RealtimeMediaSourceCenter::singleton().unsetAudioFactory(*this);
+    RealtimeMediaSourceCenter::unsetAudioFactory(*this);
+    RealtimeMediaSourceCenter::unsetDisplayCaptureFactory(*this);
     m_process.removeMessageReceiver(Messages::UserMediaCaptureManager::messageReceiverName());
 }
 
@@ -174,10 +218,12 @@ const char* UserMediaCaptureManager::supplementName()
 void UserMediaCaptureManager::initialize(const WebProcessCreationParameters& parameters)
 {
     if (parameters.shouldCaptureAudioInUIProcess)
-        RealtimeMediaSourceCenter::singleton().setAudioFactory(*this);
+        RealtimeMediaSourceCenter::setAudioFactory(*this);
+    if (parameters.shouldCaptureDisplayInUIProcess)
+        RealtimeMediaSourceCenter::setDisplayCaptureFactory(*this);
 }
 
-WebCore::CaptureSourceOrError UserMediaCaptureManager::createCaptureSource(const CaptureDevice& device, WebCore::RealtimeMediaSource::Type sourceType, String&& hashSalt, const WebCore::MediaConstraints* constraints)
+WebCore::CaptureSourceOrError UserMediaCaptureManager::createCaptureSource(const CaptureDevice& device, String&& hashSalt, const WebCore::MediaConstraints* constraints)
 {
     if (!constraints)
         return { };
@@ -186,10 +232,11 @@ WebCore::CaptureSourceOrError UserMediaCaptureManager::createCaptureSource(const
     RealtimeMediaSourceSettings settings;
     String errorMessage;
     bool succeeded;
-    if (!m_process.sendSync(Messages::UserMediaCaptureManagerProxy::CreateMediaSourceForCaptureDeviceWithConstraints(id, device, sourceType, hashSalt, *constraints), Messages::UserMediaCaptureManagerProxy::CreateMediaSourceForCaptureDeviceWithConstraints::Reply(succeeded, errorMessage, settings), 0))
+    if (!m_process.sendSync(Messages::UserMediaCaptureManagerProxy::CreateMediaSourceForCaptureDeviceWithConstraints(id, device, hashSalt, *constraints), Messages::UserMediaCaptureManagerProxy::CreateMediaSourceForCaptureDeviceWithConstraints::Reply(succeeded, errorMessage, settings), 0))
         return WTFMove(errorMessage);
 
-    auto source = adoptRef(*new Source(String::number(id), sourceType, String { settings.label() }, WTFMove(hashSalt), id, *this));
+    auto type = device.type() == CaptureDevice::DeviceType::Microphone ? WebCore::RealtimeMediaSource::Type::Audio : WebCore::RealtimeMediaSource::Type::Video;
+    auto source = adoptRef(*new Source(String::number(id), type, String { settings.label() }, WTFMove(hashSalt), id, *this));
     source->setSettings(WTFMove(settings));
     m_sources.set(id, source.copyRef());
     return WebCore::CaptureSourceOrError(WTFMove(source));
@@ -239,6 +286,19 @@ void UserMediaCaptureManager::audioSamplesAvailable(uint64_t id, MediaTime time,
     source.audioSamplesAvailable(time, numberOfFrames);
 }
 
+#if HAVE(IOSURFACE)
+void UserMediaCaptureManager::remoteVideoSampleAvailable(uint64_t id, RemoteVideoSample&& sample)
+{
+    ASSERT(m_sources.contains(id));
+    m_sources.get(id)->remoteVideoSampleAvailable(WTFMove(sample));
+}
+#else
+NO_RETURN_DUE_TO_ASSERT void UserMediaCaptureManager::remoteVideoSampleAvailable(uint64_t, RemoteVideoSample&&)
+{
+    ASSERT_NOT_REACHED();
+}
+#endif
+
 void UserMediaCaptureManager::startProducingData(uint64_t id)
 {
     m_process.send(Messages::UserMediaCaptureManagerProxy::StartProducingData(id), 0);
index 37632d5..8eab017 100644 (file)
@@ -36,6 +36,7 @@
 
 namespace WebCore {
 class CAAudioStreamDescription;
+class RemoteVideoSample;
 }
 
 namespace WebKit {
@@ -43,7 +44,7 @@ namespace WebKit {
 class CrossProcessRealtimeAudioSource;
 class WebProcess;
 
-class UserMediaCaptureManager : public WebProcessSupplement, public IPC::MessageReceiver, public WebCore::AudioCaptureFactory, public WebCore::VideoCaptureFactory {
+class UserMediaCaptureManager : public WebProcessSupplement, public IPC::MessageReceiver, public WebCore::AudioCaptureFactory, public WebCore::VideoCaptureFactory, public WebCore::DisplayCaptureFactory {
 public:
     explicit UserMediaCaptureManager(WebProcess&);
     ~UserMediaCaptureManager();
@@ -55,9 +56,10 @@ private:
     void initialize(const WebProcessCreationParameters&) final;
 
     // WebCore::RealtimeMediaSource factories
-    WebCore::CaptureSourceOrError createAudioCaptureSource(const WebCore::CaptureDevice& device, String&& hashSalt, const WebCore::MediaConstraints* constraints) final { return createCaptureSource(device, WebCore::RealtimeMediaSource::Type::Audio, WTFMove(hashSalt), constraints); }
-    WebCore::CaptureSourceOrError createVideoCaptureSource(const WebCore::CaptureDevice& device, String&& hashSalt, const WebCore::MediaConstraints* constraints) final { return createCaptureSource(device, WebCore::RealtimeMediaSource::Type::Video, WTFMove(hashSalt), constraints); }
-    WebCore::CaptureSourceOrError createCaptureSource(const WebCore::CaptureDevice&, WebCore::RealtimeMediaSource::Type, String&&, const WebCore::MediaConstraints*);
+    WebCore::CaptureSourceOrError createAudioCaptureSource(const WebCore::CaptureDevice& device, String&& hashSalt, const WebCore::MediaConstraints* constraints) final { return createCaptureSource(device, WTFMove(hashSalt), constraints); }
+    WebCore::CaptureSourceOrError createVideoCaptureSource(const WebCore::CaptureDevice& device, String&& hashSalt, const WebCore::MediaConstraints* constraints) final { return createCaptureSource(device, WTFMove(hashSalt), constraints); }
+    WebCore::CaptureSourceOrError createDisplayCaptureSource(const WebCore::CaptureDevice& device, const WebCore::MediaConstraints* constraints) final  { return createCaptureSource(device, { }, constraints); }
+    WebCore::CaptureSourceOrError createCaptureSource(const WebCore::CaptureDevice&, String&&, const WebCore::MediaConstraints*);
 
     // IPC::MessageReceiver
     void didReceiveMessage(IPC::Connection&, IPC::Decoder&) final;
@@ -70,6 +72,7 @@ private:
     void storageChanged(uint64_t id, const SharedMemory::Handle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames);
     void ringBufferFrameBoundsChanged(uint64_t id, uint64_t startFrame, uint64_t endFrame);
     void audioSamplesAvailable(uint64_t id, MediaTime, uint64_t numberOfFrames, uint64_t startFrame, uint64_t endFrame);
+    void remoteVideoSampleAvailable(uint64_t id, WebCore::RemoteVideoSample&&);
 
     void startProducingData(uint64_t);
     void stopProducingData(uint64_t);
@@ -85,6 +88,6 @@ private:
     WebProcess& m_process;
 };
 
-}
+} // namespace WebKit
 
 #endif
index eb57405..a062dc3 100644 (file)
@@ -32,6 +32,7 @@ messages -> UserMediaCaptureManager {
     AudioSamplesAvailable(uint64_t id, MediaTime time, uint64_t numberOfFrames, uint64_t startFrame, uint64_t endFrame)
     ApplyConstraintsSucceeded(uint64_t id, WebCore::RealtimeMediaSourceSettings settings)
     ApplyConstraintsFailed(uint64_t id, String failedConstraint, String message)
+    RemoteVideoSampleAvailable(uint64_t id, WebCore::RemoteVideoSample sample)
 }
 
 #endif