/*
 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "AVMediaCaptureSource.h"

#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)

#import "AVCaptureDeviceManager.h"
#import "AudioSourceProvider.h"
#import "CoreMediaSoftLink.h"
#import "Logging.h"
#import "MediaConstraints.h"
#import "RealtimeMediaSourceSettings.h"
#import "SoftLinking.h"
#import "UUID.h"
#import <AVFoundation/AVFoundation.h>
#import <objc/runtime.h>
#import <wtf/MainThread.h>

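// Type aliases for the soft-linked AVFoundation capture classes. The class names
// themselves are redefined below to route through the soft-link getters, so the
// declarations in this file refer to these aliases instead.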
typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceTypedef;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureSession AVCaptureSessionType;
typedef AVCaptureAudioDataOutput AVCaptureAudioDataOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;

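// AVFoundation is soft-linked: SOFT_LINK_FRAMEWORK_OPTIONAL loads the framework
// lazily at runtime instead of creating a hard link-time dependency, and the
// classes and string constants below are resolved through the generated getters.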
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS(AVFoundation, AVCaptureAudioDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)

#define AVCaptureAudioDataOutput getAVCaptureAudioDataOutputClass()
#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureSession getAVCaptureSessionClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()

SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMuxed, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPresetLow, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionDidStopRunningNotification, NSString *)

#define AVMediaTypeAudio getAVMediaTypeAudio()
#define AVMediaTypeMuxed getAVMediaTypeMuxed()
#define AVMediaTypeVideo getAVMediaTypeVideo()
#define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
#define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
#define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
#define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()
#define AVCaptureSessionDidStopRunningNotification getAVCaptureSessionDidStopRunningNotification()

using namespace WebCore;

@interface WebCoreAVMediaCaptureSourceObserver : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVMediaCaptureSource* m_callback;
}

-(id)initWithCallback:(AVMediaCaptureSource*)callback;
-(void)disconnect;
-(void)captureOutput:(AVCaptureOutputType *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType *)connection;
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void*)context;
@end

namespace WebCore {

static NSArray* sessionKVOProperties();

static dispatch_queue_t globalAudioCaptureSerialQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVMediaCaptureSource audio capture queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}

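// Video sample buffers are delivered on a dedicated serial queue that targets the
// high-priority global queue so frame delivery stays responsive.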
static dispatch_queue_t globalVideoCaptureSerialQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVMediaCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(globalQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return globalQueue;
}

AVMediaCaptureSource::AVMediaCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id, RealtimeMediaSource::Type type)
    : RealtimeMediaSource(id, type, emptyString())
    , m_objcObserver(adoptNS([[WebCoreAVMediaCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
    , m_weakPtrFactory(this)
{
    setName(device.localizedName);
    setPersistentID(device.uniqueID);
    setMuted(true);
}

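// Disconnect the Objective-C observer first so any in-flight delegate callbacks
// become no-ops, then remove the KVO observers and stop the session.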
AVMediaCaptureSource::~AVMediaCaptureSource()
{
    [m_objcObserver disconnect];

    if (m_session) {
        for (NSString *keyName in sessionKVOProperties())
            [m_session removeObserver:m_objcObserver.get() forKeyPath:keyName];
        [m_session stopRunning];
    }
}

void AVMediaCaptureSource::startProducingData()
{
    if (!m_session)
        setupSession();

    if ([m_session isRunning])
        return;

    [m_session startRunning];
}

void AVMediaCaptureSource::stopProducingData()
{
    if (!m_session || ![m_session isRunning])
        return;

    [m_session stopRunning];
}

void AVMediaCaptureSource::beginConfiguration()
{
    if (m_session)
        [m_session beginConfiguration];
}

void AVMediaCaptureSource::commitConfiguration()
{
    if (m_session)
        [m_session commitConfiguration];
}

void AVMediaCaptureSource::initializeSettings()
{
    if (m_currentSettings.deviceId().isEmpty())
        m_currentSettings.setSupportedConstraits(supportedConstraints());

    m_currentSettings.setDeviceId(id());
    updateSettings(m_currentSettings);
}

const RealtimeMediaSourceSettings& AVMediaCaptureSource::settings() const
{
    const_cast<AVMediaCaptureSource&>(*this).initializeSettings();
    return m_currentSettings;
}

RealtimeMediaSourceSupportedConstraints& AVMediaCaptureSource::supportedConstraints()
{
    if (m_supportedConstraints.supportsDeviceId())
        return m_supportedConstraints;

    m_supportedConstraints.setSupportsDeviceId(true);
    initializeSupportedConstraints(m_supportedConstraints);

    return m_supportedConstraints;
}

void AVMediaCaptureSource::initializeCapabilities()
{
    m_capabilities = RealtimeMediaSourceCapabilities::create(supportedConstraints());
    m_capabilities->setDeviceId(id());

    initializeCapabilities(*m_capabilities.get());
}

RefPtr<RealtimeMediaSourceCapabilities> AVMediaCaptureSource::capabilities() const
{
    if (!m_capabilities)
        const_cast<AVMediaCaptureSource&>(*this).initializeCapabilities();
    return m_capabilities;
}

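// Lazily create the AVCaptureSession, observe the key paths in sessionKVOProperties(),
// and let the subclass add its inputs and outputs inside a begin/commitConfiguration pair.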
void AVMediaCaptureSource::setupSession()
{
    if (m_session)
        return;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    for (NSString *keyName in sessionKVOProperties())
        [m_session addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    setupCaptureSession();
    [m_session commitConfiguration];
}

void AVMediaCaptureSource::reset()
{
    RealtimeMediaSource::reset();
    m_isRunning = false;
    for (NSString *keyName in sessionKVOProperties())
        [m_session removeObserver:m_objcObserver.get() forKeyPath:keyName];

    for (const auto& preview : m_previews) {
        if (preview)
            preview->invalidate();
    }
    m_previews.clear();

    shutdownCaptureSession();
    m_session = nullptr;
}

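// The session's "running" state is observed via KVO and may be reported off the main
// thread; the change is applied through a deferred task that mirrors it into the
// source's muted state.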
void AVMediaCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        if (state == m_isRunning)
            return;

        m_isRunning = state;
        setMuted(!m_isRunning);
    });
}

void AVMediaCaptureSource::setVideoSampleBufferDelegate(AVCaptureVideoDataOutputType* videoOutput)
{
    [videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globalVideoCaptureSerialQueue()];
}

void AVMediaCaptureSource::setAudioSampleBufferDelegate(AVCaptureAudioDataOutputType* audioOutput)
{
    [audioOutput setSampleBufferDelegate:m_objcObserver.get() queue:globalAudioCaptureSerialQueue()];
}

AudioSourceProvider* AVMediaCaptureSource::audioSourceProvider()
{
    ASSERT_NOT_REACHED();
    return nullptr;
}

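// Previews are tracked with weak pointers: a preview unregisters itself from its
// parent source on destruction, and reset() invalidates any that are still alive.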
RefPtr<RealtimeMediaSourcePreview> AVMediaCaptureSource::preview()
{
    RefPtr<AVMediaSourcePreview> preview = createPreview();
    if (!preview)
        return nullptr;

    m_previews.append(preview->createWeakPtr());
    return preview.leakRef();
}

void AVMediaCaptureSource::removePreview(AVMediaSourcePreview* preview)
{
    size_t index;
    for (index = 0; index < m_previews.size(); ++index) {
        if (m_previews[index].get() == preview)
            break;
    }

    if (index < m_previews.size())
        m_previews.remove(index);
}

AVMediaSourcePreview::AVMediaSourcePreview(AVMediaCaptureSource* parent)
    : m_parent(parent->createWeakPtr())
{
}

AVMediaSourcePreview::~AVMediaSourcePreview()
{
    if (m_parent)
        m_parent->removePreview(this);
}

void AVMediaSourcePreview::invalidate()
{
    m_parent = nullptr;
    RealtimeMediaSourcePreview::invalidate();
}

NSArray* sessionKVOProperties()
{
    static NSArray* keys = [@[@"running"] retain];
    return keys;
}

} // namespace WebCore

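// The observer forwards AVCapture delegate callbacks and KVO notifications to the
// C++ source; -disconnect clears m_callback so late notifications are ignored once
// the source has gone away.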
@implementation WebCoreAVMediaCaptureSourceObserver

- (id)initWithCallback:(AVMediaCaptureSource*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;
    return self;
}

- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}

- (void)captureOutput:(AVCaptureOutputType *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType *)connection
{
    if (!m_callback)
        return;

    m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

#if !LOG_DISABLED
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    if (willChange)
        LOG(Media, "WebCoreAVMediaCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVMediaCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if ([keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
}

@end

#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)