Replace WTF::move with WTFMove
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AVCaptureDeviceManager.h"
32 #import "BlockExceptions.h"
33 #import "GraphicsContextCG.h"
34 #import "ImageBuffer.h"
35 #import "IntRect.h"
36 #import "Logging.h"
37 #import "MediaConstraints.h"
38 #import "NotImplemented.h"
39 #import "PlatformLayer.h"
40 #import "RealtimeMediaSourceCenter.h"
41 #import "RealtimeMediaSourceSettings.h"
42 #import <AVFoundation/AVFoundation.h>
43 #import <objc/runtime.h>
44
45 #import "CoreMediaSoftLink.h"
46
// AVFoundation and CoreVideo are soft-linked so WebCore does not take a hard
// load-time dependency on them; each class/constant/function below is resolved
// lazily through the SOFT_LINK machinery, and the #defines re-route the normal
// spellings to the soft-link getters.
typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceType;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
typedef AVCaptureVideoPreviewLayer AVCaptureVideoPreviewLayerType;

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)

SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoPreviewLayer)

#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
#define AVCaptureVideoPreviewLayer getAVCaptureVideoPreviewLayerClass()

// NOTE(review): AVMediaTypeAudio is soft-linked but has no matching #define and
// is unused in this file — presumably kept for symmetry with the audio source.
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPresetLow, NSString *)

#define AVMediaTypeVideo getAVMediaTypeVideo()
#define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
#define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
#define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
#define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()

SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))

SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
#define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
95 namespace WebCore {
96
// Factory: allocate a new source for the given capture device and hand
// ownership to the caller via RefPtr.
RefPtr<AVMediaCaptureSource> AVVideoCaptureSource::create(AVCaptureDeviceType* device, const AtomicString& id, PassRefPtr<MediaConstraints> constraint)
{
    auto* source = new AVVideoCaptureSource(device, id, constraint);
    return adoptRef(source);
}
101
// Construct a video-typed capture source for the given AVCaptureDevice.
// All AVFoundation session wiring is deferred to setupCaptureSession().
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceType* device, const AtomicString& id, PassRefPtr<MediaConstraints> constraint)
    : AVMediaCaptureSource(device, id, RealtimeMediaSource::Video, constraint)
{
}
106
// Session teardown is handled by shutdownCaptureSession(); nothing extra to do here.
AVVideoCaptureSource::~AVVideoCaptureSource()
{
}
110
// Intentionally a stub: capabilities reporting is not implemented yet, so the
// passed-in RealtimeMediaSourceCapabilities is left untouched.
void AVVideoCaptureSource::initializeCapabilities(RealtimeMediaSourceCapabilities&)
{
    // FIXME: finish this implementation
}
115
// Advertise which MediaStream constraints this video source can honor.
// Width/height/aspect-ratio are applied via session presets, frame rate via
// the device's frame-duration API, and facing mode via the device position.
void AVVideoCaptureSource::initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints& supportedConstraints)
{
    supportedConstraints.setSupportsFacingMode(true);
    supportedConstraints.setSupportsFrameRate(true);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
}
124
// Populate the current MediaStream track settings from the device state and
// the statistics gathered from delivered frames (m_width/m_height/m_frameRate).
void AVVideoCaptureSource::updateSettings(RealtimeMediaSourceSettings& settings)
{
    settings.setDeviceId(id());

    // Map the AVFoundation camera position onto the MediaStream facing mode.
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(m_frameRate);
    settings.setWidth(m_width);
    settings.setHeight(m_height);
    // m_height is 0 until the first frame arrives (and after shutdown), so guard
    // the division instead of producing an inf/NaN aspect ratio.
    settings.setAspectRatio(m_height ? static_cast<float>(m_width) / m_height : 0);
}
141
// Try to configure the capture device for the requested frame rate range.
// A maxFrameRate of 0 means "exactly minFrameRate". Returns false when no
// supported range matches or the device could not be (re)configured.
bool AVVideoCaptureSource::setFrameRateConstraint(float minFrameRate, float maxFrameRate)
{
    AVFrameRateRange *bestFrameRateRange = nil;

    for (AVFrameRateRange *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (!maxFrameRate) {
            // Exact-rate request: only a range whose minimum matches qualifies.
            if (minFrameRate == [frameRateRange minFrameRate])
                bestFrameRateRange = frameRateRange;
        } else if (minFrameRate >= [frameRateRange minFrameRate] && maxFrameRate <= [frameRateRange maxFrameRate]) {
            // Prefer the candidate with the largest minimum frame duration.
            // Check for nil explicitly before comparing: messaging nil for a
            // CMTime yields a zero-filled (invalid) struct, and comparing an
            // invalid CMTime gives an unreliable result.
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange) {
        LOG(Media, "AVVideoCaptureSource::setFrameRateConstraint(%p), frame rate range %f..%f not supported by video device", this, minFrameRate, maxFrameRate);
        return false;
    }

    NSError *error = nil;
    @try {
        if ([device() lockForConfiguration:&error]) {
            [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
            if (maxFrameRate)
                [device() setActiveVideoMaxFrameDuration:[bestFrameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        // Configuration APIs can throw for programmer errors; treat that as a
        // constraint-application failure rather than crashing the web process.
        LOG(Media, "AVVideoCaptureSource::setFrameRateConstraint(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::setFrameRateConstraint(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::setFrameRateConstraint(%p) - set frame rate range to %f..%f", this, minFrameRate, maxFrameRate);
    return true;
}
182
// Apply width/height and frame-rate constraints to the capture session.
// Mandatory constraint values are consulted first; optional values are only
// used as a fallback when no mandatory value parsed to a nonzero number.
// Returns false if a requested constraint cannot be satisfied.
bool AVVideoCaptureSource::applyConstraints(MediaConstraints* constraints)
{
    ASSERT(constraints);

    const RealtimeMediaSourceSupportedConstraints& supportedConstraints = RealtimeMediaSourceCenter::singleton().supportedConstraints();
    String widthConstraintValue;
    String heightConstraintValue;
    String widthConstraintName = supportedConstraints.nameForConstraint(MediaConstraintType::Width);
    String heightConstraintName = supportedConstraints.nameForConstraint(MediaConstraintType::Height);

    constraints->getMandatoryConstraintValue(widthConstraintName, widthConstraintValue);
    constraints->getMandatoryConstraintValue(heightConstraintName, heightConstraintValue);

    // toInt() yields 0 for absent/non-numeric values, so 0 doubles as "unset".
    int width = widthConstraintValue.toInt();
    int height = heightConstraintValue.toInt();
    if (!width && !height) {
        constraints->getOptionalConstraintValue(widthConstraintName, widthConstraintValue);
        constraints->getOptionalConstraintValue(heightConstraintName, heightConstraintValue);
        width = widthConstraintValue.toInt();
        height = heightConstraintValue.toInt();
    }

    if (width || height) {
        // Dimensions are satisfied by switching session presets, not by
        // configuring the device format directly.
        NSString *preset = AVCaptureSessionInfo(session()).bestSessionPresetForVideoDimensions(width, height);
        if (!preset || ![session() canSetSessionPreset:preset])
            return false;

        [session() setSessionPreset:preset];
    }

    String frameRateConstraintValue;
    String frameRateConstraintName = supportedConstraints.nameForConstraint(MediaConstraintType::FrameRate);
    constraints->getMandatoryConstraintValue(frameRateConstraintName, frameRateConstraintValue);
    float frameRate = frameRateConstraintValue.toFloat();
    if (!frameRate) {
        constraints->getOptionalConstraintValue(frameRateConstraintName, frameRateConstraintValue);
        frameRate = frameRateConstraintValue.toFloat();
    }
    // maxFrameRate of 0 requests exactly `frameRate` (see setFrameRateConstraint).
    if (frameRate && !setFrameRateConstraint(frameRate, 0))
        return false;

    return true;
}
226
// Wire the capture device into the session: attach the device input, apply any
// pending constraints, and attach a 32BGRA video-data output whose sample
// buffers are delivered to this object's delegate.
void AVVideoCaptureSource::setupCaptureSession()
{
    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (!videoIn) {
        // -initWithDevice:error: returns nil on failure (e.g. the device is in
        // use); the original code discarded the error and passed nil onward.
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to create video input device: %s", this, [[error localizedDescription] UTF8String]);
        return;
    }

    if (![session() canAddInput:videoIn.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return;
    }
    [session() addInput:videoIn.get()];

    if (constraints())
        applyConstraints(constraints());

    // Request 32BGRA output so currentFrameCGImage() can build a CGImage from
    // the pixel buffer without a format conversion.
    RetainPtr<AVCaptureVideoDataOutputType> videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    [videoOutput setVideoSettings:@{ kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }];
    setVideoSampleBufferDelegate(videoOutput.get());

    if (![session() canAddOutput:videoOutput.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return;
    }
    [session() addOutput:videoOutput.get()];
}
254
// Release all per-session objects and reset the cached frame statistics so a
// subsequent setupCaptureSession() starts from a clean slate.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_videoPreviewLayer = nullptr;
    m_lastImage = nullptr;
    m_buffer = nullptr;

    m_videoFrameTimeStamps.clear();
    m_width = 0;
    m_height = 0;
    m_frameRate = 0;
}
265
// Re-estimate the observed frame rate from the sample's presentation time.
// The estimate is the count of frames seen in the trailing one second,
// averaged with the previous estimate to smooth jitter. Returns true when
// the smoothed value changed.
bool AVVideoCaptureSource::updateFramerate(CMSampleBufferRef sampleBuffer)
{
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!CMTIME_IS_NUMERIC(presentationTime))
        return false;

    Float64 currentTime = CMTimeGetSeconds(presentationTime);
    m_videoFrameTimeStamps.append(currentTime);

    // Evict timestamps that fell out of the one-second window. The vector is
    // never emptied here: the timestamp just appended is inside the window.
    Float64 windowStart = currentTime - 1;
    while (m_videoFrameTimeStamps[0] < windowStart)
        m_videoFrameTimeStamps.remove(0);

    Float64 previousRate = m_frameRate;
    m_frameRate = (m_frameRate + m_videoFrameTimeStamps.size()) / 2;

    return previousRate != m_frameRate;
}
285
// Adopt a newly delivered sample buffer as the current frame: update the
// frame-rate estimate, cache the buffer, invalidate the cached CGImage, and
// notify observers if the frame dimensions changed.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer)
{
    // Ignore frames delivered when the session is not running, we want to hang onto the last image
    // delivered before it stopped.
    if (m_lastImage && (!isProducingData() || muted()))
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    updateFramerate(sampleBuffer.get());

    bool settingsChanged = false;

    // Cache the buffer and drop the cached CGImage; currentFrameCGImage()
    // will lazily rebuild it from m_buffer on the next request.
    m_buffer = sampleBuffer;
    m_lastImage = nullptr;

    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;
        settingsChanged = true;
    }

    // NOTE(review): a frame-rate change from updateFramerate() does not set
    // settingsChanged — only dimension changes trigger the notification.
    if (settingsChanged)
        this->settingsDidChanged();
}
314
// AVCaptureVideoDataOutput delegate entry point. Retain the sample buffer so
// it outlives this callback, then defer the actual processing through
// scheduleDeferredTask().
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)
{
    RetainPtr<CMSampleBufferRef> retainedBuffer = sampleBuffer;
    scheduleDeferredTask([this, retainedBuffer] {
        processNewFrame(retainedBuffer);
    });
}
323
// Render the most recent captured frame into an offscreen buffer and return
// it as a WebCore Image, or null if no frame is available yet.
RefPtr<Image> AVVideoCaptureSource::currentFrameImage()
{
    if (!currentFrameCGImage())
        return nullptr;

    FloatRect frameRect(0, 0, m_width, m_height);
    std::unique_ptr<ImageBuffer> buffer = ImageBuffer::create(frameRect.size(), Unaccelerated);
    if (!buffer)
        return nullptr;

    paintCurrentFrameInContext(buffer->context(), frameRect);
    return ImageBuffer::sinkIntoImage(WTFMove(buffer));
}
339
// Return (building and caching if necessary) a CGImage of the current frame.
// The cache (m_lastImage) is invalidated whenever a new frame arrives.
RetainPtr<CGImageRef> AVVideoCaptureSource::currentFrameCGImage()
{
    if (m_lastImage)
        return m_lastImage;

    if (!m_buffer)
        return nullptr;

    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_buffer.get()));
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void* baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // Copy the pixels instead of wrapping the base address directly: the pixel
    // buffer is unlocked below, before the cached CGImage is ever drawn, so a
    // non-owning data provider would read memory the capture pipeline may have
    // recycled by then.
    RetainPtr<CFDataRef> imageData = adoptCF(CFDataCreate(NULL, static_cast<const UInt8*>(baseAddress), bytesPerRow * height));
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateWithCFData(imageData.get()));
    m_lastImage = adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, provider.get(), NULL, true, kCGRenderingIntentDefault));

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    return m_lastImage;
}
364
// Draw the current frame into the given graphics context, scaled to rect.
// The context is flipped vertically around rect before drawing, matching the
// original implementation's coordinate handling.
void AVVideoCaptureSource::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    RetainPtr<CGImageRef> image = currentFrameCGImage();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1, -1));
    context.setImageInterpolationQuality(InterpolationLow);

    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), image.get());
}
377
// Lazily create and cache the AVCaptureVideoPreviewLayer for this source's
// session. The cache lives in a member mutable through this const method.
PlatformLayer* AVVideoCaptureSource::platformLayer() const
{
    if (!m_videoPreviewLayer) {
        m_videoPreviewLayer = adoptNS([allocAVCaptureVideoPreviewLayerInstance() initWithSession:session()]);
#ifndef NDEBUG
        // Name the layer so it is identifiable in layer-tree dumps.
        m_videoPreviewLayer.get().name = @"AVVideoCaptureSource preview layer";
#endif
    }

    return m_videoPreviewLayer.get();
}
390
391 } // namespace WebCore
392
#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)