ef5e45c9031d9623bcf247c88f58d4f1831b26da
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AVCaptureDeviceManager.h"
32 #import "GraphicsContextCG.h"
33 #import "ImageBuffer.h"
34 #import "IntRect.h"
35 #import "Logging.h"
36 #import "MediaConstraints.h"
37 #import "MediaSampleAVFObjC.h"
38 #import "NotImplemented.h"
39 #import "PlatformLayer.h"
40 #import "RealtimeMediaSourceCenter.h"
41 #import "RealtimeMediaSourcePreview.h"
42 #import "RealtimeMediaSourceSettings.h"
43 #import <AVFoundation/AVFoundation.h>
44 #import <objc/runtime.h>
45
46 #import "CoreMediaSoftLink.h"
47 #import "CoreVideoSoftLink.h"
48
49 typedef AVCaptureConnection AVCaptureConnectionType;
50 typedef AVCaptureDevice AVCaptureDeviceTypedef;
51 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
52 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
53 typedef AVCaptureOutput AVCaptureOutputType;
54 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
55 typedef AVFrameRateRange AVFrameRateRangeType;
56 typedef AVCaptureVideoPreviewLayer AVCaptureVideoPreviewLayerType;
57
58 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
59
60 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
61 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
66 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoPreviewLayer)
67 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
68
69 #define AVCaptureConnection getAVCaptureConnectionClass()
70 #define AVCaptureDevice getAVCaptureDeviceClass()
71 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
72 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
73 #define AVCaptureOutput getAVCaptureOutputClass()
74 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
75 #define AVCaptureVideoPreviewLayer getAVCaptureVideoPreviewLayerClass()
76 #define AVFrameRateRange getAVFrameRateRangeClass()
77
78 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
79 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset960x540, NSString *)
80 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
81 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
82 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset320x240, NSString*)
83 SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPresetLow, NSString *)
84
85 #define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
86 #define AVCaptureSessionPreset960x540 getAVCaptureSessionPreset960x540()
87 #define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
88 #define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
89 #define AVCaptureSessionPreset320x240 getAVCaptureSessionPreset320x240()
90 #define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()
91
92 namespace WebCore {
93
// Preview for a video capture source. Owns an AVCaptureVideoPreviewLayer that
// renders frames from the capture session, hosted inside a plain CALayer that
// is the layer actually exposed to WebCore (see platformLayer()).
class AVVideoSourcePreview: public AVMediaSourcePreview {
public:
    static RefPtr<AVMediaSourcePreview> create(AVCaptureSession *, AVCaptureDeviceTypedef *, AVVideoCaptureSource*);

private:
    AVVideoSourcePreview(AVCaptureSession *, AVCaptureDeviceTypedef *, AVVideoCaptureSource*);

    // Releases all retained platform objects, then invalidates the base class.
    void invalidate() final;

    // play()/pause() toggle the preview layer's capture connection (see setPaused()).
    void play() const final;
    void pause() const final;
    void setVolume(double) const final { }; // Video-only preview: volume is a no-op.
    void setEnabled(bool) final;
    PlatformLayer* platformLayer() const final { return m_previewBackgroundLayer.get(); }
    
    void setPaused(bool) const;

    RetainPtr<AVCaptureVideoPreviewLayerType> m_previewLayer;
    RetainPtr<PlatformLayer> m_previewBackgroundLayer; // Parent layer handed to WebCore.
    RetainPtr<AVCaptureDeviceTypedef> m_device;
};
115
RefPtr<AVMediaSourcePreview> AVVideoSourcePreview::create(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
{
    // Adopt the freshly allocated preview so the returned RefPtr owns the only reference.
    RefPtr<AVMediaSourcePreview> preview = adoptRef(new AVVideoSourcePreview(session, device, parent));
    return preview;
}
120
AVVideoSourcePreview::AVVideoSourcePreview(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
    : AVMediaSourcePreview(parent)
{
    m_device = device;
    // The preview layer renders frames directly from the shared capture session.
    m_previewLayer = adoptNS([allocAVCaptureVideoPreviewLayerInstance() initWithSession:session]);
#ifndef NDEBUG
    m_previewLayer.get().name = @"AVVideoCaptureSource preview layer";
#endif

    m_previewLayer.get().contentsGravity = kCAGravityResize;
    m_previewLayer.get().anchorPoint = CGPointZero;
#if !PLATFORM(IOS)
    // CALayer autoresizing masks are macOS-only; on iOS the layer is sized explicitly.
    m_previewLayer.get().autoresizingMask = kCALayerWidthSizable | kCALayerHeightSizable;
#endif

    // The background layer is the one exposed via platformLayer(); the preview
    // layer is attached to it as a sublayer below.
    m_previewBackgroundLayer = adoptNS([[CALayer alloc] init]);
    m_previewBackgroundLayer.get().name = @"AVVideoSourcePreview parent layer";
    m_previewBackgroundLayer.get().contentsGravity = kCAGravityResizeAspect;
    m_previewBackgroundLayer.get().anchorPoint = CGPointZero;
    m_previewBackgroundLayer.get().needsDisplayOnBoundsChange = YES;
#if !PLATFORM(IOS)
    m_previewBackgroundLayer.get().autoresizingMask = kCALayerWidthSizable | kCALayerHeightSizable;
#endif

    [m_previewBackgroundLayer addSublayer:m_previewLayer.get()];
}
147
void AVVideoSourcePreview::invalidate()
{
    // Drop every retained platform object, then let the base class finish
    // invalidation. The resets are independent of each other.
    m_device = nullptr;
    m_previewBackgroundLayer = nullptr;
    m_previewLayer = nullptr;
    AVMediaSourcePreview::invalidate();
}
155
void AVVideoSourcePreview::play() const
{
    // Playing the preview just un-pauses its capture connection.
    setPaused(false);
}
160
void AVVideoSourcePreview::pause() const
{
    // Pausing the preview disables its capture connection.
    setPaused(true);
}
165
void AVVideoSourcePreview::setPaused(bool paused) const
{
    // Enable/disable only the preview layer's connection, bracketed by a device
    // configuration lock. Lock failure is deliberately ignored (nil error out-param).
    [m_device lockForConfiguration:nil];
    m_previewLayer.get().connection.enabled = !paused;
    [m_device unlockForConfiguration];
}
172
void AVVideoSourcePreview::setEnabled(bool enabled)
{
    // A disabled preview is simply hidden; the connection state is untouched.
    [m_previewLayer setHidden:!enabled];
}
177
178 const OSType videoCaptureFormat = kCVPixelFormatType_32BGRA;
179
// Creates a video capture source for |device|; returns nullptr (and sets
// |invalidConstraint|) when the supplied constraints cannot be satisfied.
RefPtr<AVMediaCaptureSource> AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints, String& invalidConstraint)
{
    RefPtr<AVVideoCaptureSource> source = adoptRef(new AVVideoCaptureSource(device, id));
    if (!constraints)
        return source;

    // applyConstraints() yields a value only on failure; the pair's first
    // member names the offending constraint.
    auto result = source->applyConstraints(*constraints);
    if (!result)
        return source;

    invalidConstraint = result.value().first;
    return nullptr;
}
193
// All real setup happens later in setupCaptureSession(); the constructor only
// forwards to the base class with the Video source type.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : AVMediaCaptureSource(device, id, RealtimeMediaSource::Video)
{
}
198
// RetainPtr members release themselves; nothing else to tear down here.
AVVideoCaptureSource::~AVVideoCaptureSource()
{
}
202
// Widens the inclusive [min, max] interval so it also covers |value|.
static void updateSizeMinMax(int& min, int& max, int value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
208
// Widens the inclusive [min, max] interval so it also covers |value|.
static void updateAspectRatioMinMax(double& min, double& max, double value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
214
// Computes the device's capability ranges (facing mode, frame rate, width,
// height, aspect ratio) by probing every format's frame-rate ranges and the
// fixed set of session presets the device reports support for.
void AVVideoCaptureSource::initializeCapabilities(RealtimeMediaSourceCapabilities& capabilities)
{
    AVCaptureDeviceTypedef *videoDevice = device();

    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    Float64 lowestFrameRateRange = std::numeric_limits<double>::infinity();
    Float64 highestFrameRateRange = 0;
    // std::numeric_limits<int>::infinity() is 0 for integral types, which pinned
    // the minimums at 0; use max() so the first supported preset initializes them.
    int minimumWidth = std::numeric_limits<int>::max();
    int maximumWidth = 0;
    int minimumHeight = std::numeric_limits<int>::max();
    int maximumHeight = 0;
    double minimumAspectRatio = std::numeric_limits<double>::infinity();
    double maximumAspectRatio = 0;

    for (AVCaptureDeviceFormatType *format in [videoDevice formats]) {

        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges]) {
            lowestFrameRateRange = std::min<Float64>(lowestFrameRateRange, range.minFrameRate);
            highestFrameRateRange = std::max<Float64>(highestFrameRateRange, range.maxFrameRate);
        }

        // Aspect ratios must use floating-point division: integer forms like
        // 960 / 540 truncate to 1.
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 1280);
            updateSizeMinMax(minimumHeight, maximumHeight, 720);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 1280.0 / 720);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 960);
            updateSizeMinMax(minimumHeight, maximumHeight, 540);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 960.0 / 540);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 640);
            updateSizeMinMax(minimumHeight, maximumHeight, 480);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 640.0 / 480);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 352);
            updateSizeMinMax(minimumHeight, maximumHeight, 288);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 352.0 / 288);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 320);
            updateSizeMinMax(minimumHeight, maximumHeight, 240);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 320.0 / 240);
        }
    }

    capabilities.setFrameRate(CapabilityValueOrRange(lowestFrameRateRange, highestFrameRateRange));
    capabilities.setWidth(CapabilityValueOrRange(minimumWidth, maximumWidth));
    capabilities.setHeight(CapabilityValueOrRange(minimumHeight, maximumHeight));
    capabilities.setAspectRatio(CapabilityValueOrRange(minimumAspectRatio, maximumAspectRatio));
}
272
void AVVideoCaptureSource::initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints& supportedConstraints)
{
    // Facing mode is only meaningful when the device reports a physical position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);
}
281
void AVVideoCaptureSource::updateSettings(RealtimeMediaSourceSettings& settings)
{
    settings.setDeviceId(id());

    // Map the camera's physical position onto the MediaStream facing mode.
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
    
    // m_frameRate/m_width/m_height are refreshed as frames arrive (processNewFrame()).
    settings.setFrameRate(m_frameRate);
    settings.setWidth(m_width);
    settings.setHeight(m_height);
    // NOTE(review): before the first frame m_height is 0, making this inf/NaN —
    // confirm callers tolerate that.
    settings.setAspectRatio(static_cast<float>(m_width) / m_height);
}
298
// Applies a requested capture size by mapping it onto one of the fixed session
// presets; returns false when no preset matches or the session rejects it.
bool AVVideoCaptureSource::applySize(const IntSize& size)
{
    NSString *preset = bestSessionPresetForVideoDimensions(size.width(), size.height());
    if (!preset || ![session() canSetSessionPreset:preset]) {
        LOG(Media, "AVVideoCaptureSource::applySize(%p), unable find or set preset for width: %i, height: %i", this, size.width(), size.height());
        return false;
    }

    return setPreset(preset);
}
309
// Sets the session preset, or records it as pending when no session exists yet
// (setupCaptureSession() re-applies it). A null preset clears the pending one.
bool AVVideoCaptureSource::setPreset(NSString *preset)
{
    if (!session()) {
        m_pendingPreset = preset;
        return true;
    }
    m_pendingPreset = nullptr;
    if (!preset)
        return true;

    // Setting an unsupported preset raises an ObjC exception; report it as failure.
    @try {
        session().sessionPreset = preset;
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applySize(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    return true;
}
329
// Applies a requested frame rate by finding a supported frame-rate range that
// contains it; returns false when no range matches or configuration fails.
bool AVVideoCaptureSource::applyFrameRate(double rate)
{
    // Among the ranges containing |rate|, prefer the one with the largest
    // minimum frame duration (i.e. the lowest minimum frame rate).
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= [frameRateRange minFrameRate] && rate <= [frameRateRange maxFrameRate]) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), frame rate %f not supported by video device", this, rate);
        return false;
    }

    NSError *error = nil;
    @try {
        if ([device() lockForConfiguration:&error]) {
            // NOTE(review): this sets the range's own minFrameDuration rather than a
            // duration derived from |rate|, and never sets the max frame duration —
            // confirm that is the intended behavior.
            [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p) - set frame rate range to %f", this, rate);
    return true;
}
364
void AVVideoCaptureSource::applySizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    // A null preset (no dimensions requested, or no matching preset) is passed
    // through to setPreset(), which treats it as "clear pending / no-op".
    setPreset(bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)));

    if (frameRate)
        applyFrameRate(frameRate.value());
}
372
// Builds the capture graph: applies any preset requested before the session
// existed, attaches the device as input, and adds a BGRA video-data output
// whose sample buffers are delivered to this source's delegate.
void AVVideoCaptureSource::setupCaptureSession()
{
    if (m_pendingPreset)
        setPreset(m_pendingPreset.get());

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    // Check the returned object, not the error out-param: Cocoa only guarantees
    // the error is meaningful when the call actually fails (returns nil).
    if (!videoIn) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return;
    }

    if (![session() canAddInput:videoIn.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return;
    }
    [session() addInput:videoIn.get()];

    RetainPtr<AVCaptureVideoDataOutputType> videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    // Request BGRA frames (videoCaptureFormat) so they can be painted directly.
    RetainPtr<NSDictionary> settingsDictionary = adoptNS([[NSDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:videoCaptureFormat], kCVPixelBufferPixelFormatTypeKey, nil]);
    [videoOutput setVideoSettings:settingsDictionary.get()];
    // Drop late frames rather than queuing them up behind slow processing.
    [videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    setVideoSampleBufferDelegate(videoOutput.get());

    if (![session() canAddOutput:videoOutput.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return;
    }
    [session() addOutput:videoOutput.get()];
}
403
// Releases the retained sample buffer and cached frame image and resets the
// frame-rate/dimension bookkeeping derived from delivered frames.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_lastImage = nullptr;
    m_videoFrameTimeStamps.clear();
    m_frameRate = 0;
    m_width = 0;
    m_height = 0;
}
413
// Updates the observed frame-rate estimate from a sample's presentation time.
// Returns true when the estimate changed, false otherwise (including when the
// sample carries no numeric timestamp).
bool AVVideoCaptureSource::updateFramerate(CMSampleBufferRef sampleBuffer)
{
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!CMTIME_IS_NUMERIC(presentationTime))
        return false;

    Float64 now = CMTimeGetSeconds(presentationTime);
    m_videoFrameTimeStamps.append(now);

    // Keep only timestamps from the trailing one-second window.
    Float64 windowStart = now - 1;
    while (m_videoFrameTimeStamps[0] < windowStart)
        m_videoFrameTimeStamps.remove(0);

    // Blend the instantaneous frames-per-second with the previous estimate.
    Float64 previousRate = m_frameRate;
    m_frameRate = (previousRate + m_videoFrameTimeStamps.size()) / 2;

    return previousRate != m_frameRate;
}
433
// Copies an incoming sample buffer, tags it for immediate display, refreshes
// the cached dimensions/frame rate, and forwards it as a MediaSample.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer)
{
    // Ignore frames delivered when the session is not running, we want to hang onto the last image
    // delivered before it stopped.
    if (m_lastImage && (!isProducingData() || muted()))
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    updateFramerate(sampleBuffer.get());

    // Copy the buffer before mutating its attachments below.
    CMSampleBufferRef newSampleBuffer = 0;
    OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer.get(), &newSampleBuffer);
    ASSERT(newSampleBuffer);
    if (status != noErr || !newSampleBuffer) {
        // Previously only asserted; in release builds a failed copy would flow a
        // null buffer into adoptCF/MediaSampleAVFObjC::create. Bail instead.
        return;
    }

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    if (attachmentsArray) {
        for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        }
    }

    m_buffer = adoptCF(newSampleBuffer);
    // Invalidate the cached CGImage; currentFrameCGImage() rebuilds it lazily.
    m_lastImage = nullptr;

    bool settingsChanged = false;
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;
        settingsChanged = true;
    }

    if (settingsChanged)
        settingsDidChange();

    mediaDataUpdated(MediaSampleAVFObjC::create(m_buffer.get()));
}
475
// Delegate callback for each captured sample buffer: retain the buffer and
// defer its processing via scheduleDeferredTask().
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)
{
    RetainPtr<CMSampleBufferRef> buffer = sampleBuffer;

    scheduleDeferredTask([this, buffer] {
        this->processNewFrame(buffer);
    });
}
484
// Returns the most recent frame as a WebCore Image by painting the cached
// CGImage into an unaccelerated ImageBuffer, or nullptr if no frame exists.
RefPtr<Image> AVVideoCaptureSource::currentFrameImage()
{
    // currentFrameCGImage() also refreshes m_lastImage, which
    // paintCurrentFrameInContext() below reads.
    if (!currentFrameCGImage())
        return nullptr;

    FloatRect imageRect(0, 0, m_width, m_height);
    std::unique_ptr<ImageBuffer> imageBuffer = ImageBuffer::create(imageRect.size(), Unaccelerated);

    if (!imageBuffer)
        return nullptr;

    paintCurrentFrameInContext(imageBuffer->context(), imageRect);

    return ImageBuffer::sinkIntoImage(WTFMove(imageBuffer));
}
500
// Returns (and caches in m_lastImage) a CGImage of the most recent BGRA sample
// buffer, or nullptr when no frame has been captured yet.
RetainPtr<CGImageRef> AVVideoCaptureSource::currentFrameCGImage()
{
    if (m_lastImage)
        return m_lastImage;

    if (!m_buffer)
        return nullptr;

    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_buffer.get()));
    // A sample buffer is not guaranteed to carry an image buffer.
    if (!pixelBuffer)
        return nullptr;
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == videoCaptureFormat);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // Copy the pixels out of the CVPixelBuffer: the previous code wrapped the
    // base address directly in a CGDataProvider, leaving the cached m_lastImage
    // pointing into memory the capture pipeline may reuse once unlocked.
    RetainPtr<CFDataRef> imageData = adoptCF(CFDataCreate(kCFAllocatorDefault, static_cast<const UInt8*>(baseAddress), bytesPerRow * height));
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    if (!imageData)
        return nullptr;

    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateWithCFData(imageData.get()));
    m_lastImage = adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, provider.get(), NULL, true, kCGRenderingIntentDefault));

    return m_lastImage;
}
525
// Draws the most recent frame into |context| at |rect|; no-op when painting is
// disabled or no frame exists.
void AVVideoCaptureSource::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    // currentFrameCGImage() refreshes m_lastImage from the most recent buffer.
    if (context.paintingDisabled() || !currentFrameCGImage())
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip the coordinate system: CGContextDrawImage uses a bottom-left origin.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1, -1));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), m_lastImage.get());
}
538
// Creates the layer-backed preview bound to this source's session and device.
RefPtr<AVMediaSourcePreview> AVVideoCaptureSource::createPreview()
{
    return AVVideoSourcePreview::create(session(), device(), this);
}
543
// Maps requested dimensions onto one of the fixed session presets, largest
// first. An unset width or height acts as a wildcard for that axis; returns nil
// when nothing is requested, the dimensions match no preset, or the device does
// not support the matching preset.
NSString *AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional<int> width, std::optional<int> height) const
{
    if (!width && !height)
        return nil;

    AVCaptureDeviceTypedef *videoDevice = device();
    if ((!width || width.value() == 1280) && (!height || height.value() == 720))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720] ? AVCaptureSessionPreset1280x720 : nil;

    if ((!width || width.value() == 960) && (!height || height.value() == 540 ))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540] ? AVCaptureSessionPreset960x540 : nil;

    if ((!width || width.value() == 640) && (!height || height.value() == 480 ))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480] ? AVCaptureSessionPreset640x480 : nil;

    if ((!width || width.value() == 352) && (!height || height.value() == 288 ))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288] ? AVCaptureSessionPreset352x288 : nil;

    if ((!width || width.value() == 320) && (!height || height.value() == 240 ))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240] ? AVCaptureSessionPreset320x240 : nil;

    return nil;
}
567
// Returns true when the requested dimensions map to a supported preset and the
// requested frame rate falls within one of the active format's ranges. Unset
// values are unconstrained.
bool AVVideoCaptureSource::supportsSizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    if (!height && !width && !frameRate)
        return true;

    if ((height || width) && !bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)))
        return false;

    if (!frameRate)
        return true;

    double rate = frameRate.value();
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= range.minFrameRate && rate <= range.maxFrameRate)
            return true;
    }

    return false;
}
587
588 } // namespace WebCore
589
590 #endif // ENABLE(MEDIA_STREAM)