ff5c348f59ef0c32cdfec8225fe2b22b81d0caf8
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AVCaptureDeviceManager.h"
32 #import "GraphicsContextCG.h"
33 #import "ImageBuffer.h"
34 #import "IntRect.h"
35 #import "Logging.h"
36 #import "MediaConstraints.h"
37 #import "MediaSampleAVFObjC.h"
38 #import "NotImplemented.h"
39 #import "PixelBufferConformerCV.h"
40 #import "PlatformLayer.h"
41 #import "RealtimeMediaSourceCenter.h"
42 #import "RealtimeMediaSourceSettings.h"
43 #import "WebActionDisablingCALayerDelegate.h"
44 #import <AVFoundation/AVCaptureDevice.h>
45 #import <AVFoundation/AVCaptureInput.h>
46 #import <AVFoundation/AVCaptureOutput.h>
47 #import <AVFoundation/AVCaptureSession.h>
48 #import <AVFoundation/AVCaptureVideoPreviewLayer.h>
49 #import <objc/runtime.h>
50
51 #if PLATFORM(IOS)
52 #include "WebCoreThread.h"
53 #include "WebCoreThreadRun.h"
54 #endif
55
56 #import "CoreMediaSoftLink.h"
57 #import "CoreVideoSoftLink.h"
58
59 typedef AVCaptureConnection AVCaptureConnectionType;
60 typedef AVCaptureDevice AVCaptureDeviceTypedef;
61 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
62 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
63 typedef AVCaptureOutput AVCaptureOutputType;
64 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
65 typedef AVFrameRateRange AVFrameRateRangeType;
66 typedef AVCaptureVideoPreviewLayer AVCaptureVideoPreviewLayerType;
67
68 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
69
70 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
71 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
72 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
73 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
74 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
75 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
76 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoPreviewLayer)
77 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
78
79 #define AVCaptureConnection getAVCaptureConnectionClass()
80 #define AVCaptureDevice getAVCaptureDeviceClass()
81 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
82 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
83 #define AVCaptureOutput getAVCaptureOutputClass()
84 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
85 #define AVCaptureVideoPreviewLayer getAVCaptureVideoPreviewLayerClass()
86 #define AVFrameRateRange getAVFrameRateRangeClass()
87
88 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
89 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
90 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset960x540, NSString *)
91 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
92 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
93 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset320x240, NSString *)
94 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPresetLow, NSString *)
95
96 #define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
97 #define AVCaptureSessionPreset960x540 getAVCaptureSessionPreset960x540()
98 #define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
99 #define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
100 #define AVCaptureSessionPreset320x240 getAVCaptureSessionPreset320x240()
101 #define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()
102
103 using namespace WebCore;
104
105 namespace WebCore {
106
107 #if PLATFORM(MAC)
108 const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8Planar;
109 #else
110 const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
111 #endif
112
// Factory registered with the media-source machinery that creates video capture
// sources backed by AVFoundation capture devices.
class AVVideoCaptureSourceFactory : public RealtimeMediaSource::VideoCaptureFactory {
public:
    // Looks up the AVCaptureDevice by its unique ID and wraps it in an
    // AVVideoCaptureSource, applying |constraints| when supplied. Returns an
    // empty result when no device matches |deviceID|.
    CaptureSourceOrError createVideoCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
        AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:deviceID];
        if (!device)
            return { };
        return AVVideoCaptureSource::create(device, emptyString(), constraints);
    }

#if PLATFORM(IOS)
    // Records |source| as the active capture source, muting the previously
    // active one first if it is still producing data — presumably because only
    // one camera capture session may run at a time on iOS (confirm).
    void setActiveSource(AVVideoCaptureSource& source)
    {
        if (m_activeSource && m_activeSource->isProducingData())
            m_activeSource->setMuted(true);
        m_activeSource = &source;
    }

private:
    AVVideoCaptureSource* m_activeSource { nullptr };
#endif
};
134
// Creates a capture source for |device|. When constraints are supplied and
// cannot be satisfied, the error result from applyConstraints() is returned
// instead of the source.
CaptureSourceOrError AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints)
{
    auto source = adoptRef(*new AVVideoCaptureSource(device, id));
    if (!constraints)
        return CaptureSourceOrError(WTFMove(source));

    auto constraintError = source->applyConstraints(*constraints);
    if (constraintError)
        return WTFMove(constraintError.value().first);

    return CaptureSourceOrError(WTFMove(source));
}
146
// Returns the process-wide factory singleton; NeverDestroyed avoids running a
// destructor at exit.
static AVVideoCaptureSourceFactory& avVideoCaptureSourceFactory()
{
    static NeverDestroyed<AVVideoCaptureSourceFactory> factory;
    return factory.get();
}
152
// Exposes the shared factory through the RealtimeMediaSource interface.
RealtimeMediaSource::VideoCaptureFactory& AVVideoCaptureSource::factory()
{
    return avVideoCaptureSourceFactory();
}
157
// Constructs a video-type capture source wrapping |device|; all setup is done
// by the AVMediaCaptureSource base class.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : AVMediaCaptureSource(device, id, Type::Video)
{
}
162
// Members are smart-pointer managed (RetainPtr); nothing to release explicitly.
AVVideoCaptureSource::~AVVideoCaptureSource()
{
}
166
// Folds |value| into the running [min, max] dimension range.
static void updateSizeMinMax(int& min, int& max, int value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
172
// Folds |value| into the running [min, max] aspect-ratio range.
static void updateAspectRatioMinMax(double& min, double& max, double value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
178
// Populates |capabilities| with the device's facing mode, the union of frame
// rates across all formats, and the dimension/aspect-ratio ranges implied by
// the session presets the device supports.
void AVVideoCaptureSource::initializeCapabilities(RealtimeMediaSourceCapabilities& capabilities)
{
    AVCaptureDeviceTypedef *videoDevice = device();

    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    Float64 lowestFrameRateRange = std::numeric_limits<double>::infinity();
    Float64 highestFrameRateRange = 0;
    // Note: std::numeric_limits<int>::infinity() evaluates to 0 for integral
    // types, which would make the running minimum a no-op; use max() as the
    // sentinel instead.
    int minimumWidth = std::numeric_limits<int>::max();
    int maximumWidth = 0;
    int minimumHeight = std::numeric_limits<int>::max();
    int maximumHeight = 0;
    double minimumAspectRatio = std::numeric_limits<double>::infinity();
    double maximumAspectRatio = 0;

    for (AVCaptureDeviceFormatType *format in [videoDevice formats]) {
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges]) {
            lowestFrameRateRange = std::min<Float64>(lowestFrameRateRange, range.minFrameRate);
            highestFrameRateRange = std::max<Float64>(highestFrameRateRange, range.maxFrameRate);
        }
    }

    // Preset support is a property of the device, not of any single format, so
    // check each preset once rather than inside the format loop. The aspect
    // ratios use floating-point division; integer division (e.g. 960 / 540)
    // would truncate to 1.
    if (AVCaptureSessionPreset1280x720 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
        updateSizeMinMax(minimumWidth, maximumWidth, 1280);
        updateSizeMinMax(minimumHeight, maximumHeight, 720);
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 1280.0 / 720);
    }
    if (AVCaptureSessionPreset960x540 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540]) {
        updateSizeMinMax(minimumWidth, maximumWidth, 960);
        updateSizeMinMax(minimumHeight, maximumHeight, 540);
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 960.0 / 540);
    }
    if (AVCaptureSessionPreset640x480 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480]) {
        updateSizeMinMax(minimumWidth, maximumWidth, 640);
        updateSizeMinMax(minimumHeight, maximumHeight, 480);
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 640.0 / 480);
    }
    if (AVCaptureSessionPreset352x288 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288]) {
        updateSizeMinMax(minimumWidth, maximumWidth, 352);
        updateSizeMinMax(minimumHeight, maximumHeight, 288);
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 352.0 / 288);
    }
    if (AVCaptureSessionPreset320x240 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240]) {
        updateSizeMinMax(minimumWidth, maximumWidth, 320);
        updateSizeMinMax(minimumHeight, maximumHeight, 240);
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 320.0 / 240);
    }

    capabilities.setFrameRate(CapabilityValueOrRange(lowestFrameRateRange, highestFrameRateRange));
    capabilities.setWidth(CapabilityValueOrRange(minimumWidth, maximumWidth));
    capabilities.setHeight(CapabilityValueOrRange(minimumHeight, maximumHeight));
    capabilities.setAspectRatio(CapabilityValueOrRange(minimumAspectRatio, maximumAspectRatio));
}
236
// Declares which constraints this source can satisfy. Facing mode is only
// supported when the device reports a physical position (front/back).
void AVVideoCaptureSource::initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints& supportedConstraints)
{
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);
}
245
// Fills |settings| from the current device position and the frame metrics most
// recently recorded by processNewFrame().
void AVVideoCaptureSource::updateSettings(RealtimeMediaSourceSettings& settings)
{
    settings.setDeviceId(id());

    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(m_frameRate);
    settings.setWidth(m_width);
    settings.setHeight(m_height);
    // m_height is 0 until the first frame arrives (and again after
    // shutdownCaptureSession()); avoid reporting a NaN aspect ratio.
    if (m_height)
        settings.setAspectRatio(static_cast<float>(m_width) / m_height);
}
262
// Switches the capture session to the preset matching |size|. Returns false
// when no known preset matches the requested dimensions or the session rejects
// the preset.
bool AVVideoCaptureSource::applySize(const IntSize& size)
{
    NSString *preset = bestSessionPresetForVideoDimensions(size.width(), size.height());
    if (!preset || ![session() canSetSessionPreset:preset]) {
        // Fixed log-message typo: "unable find" -> "unable to find".
        LOG(Media, "AVVideoCaptureSource::applySize(%p), unable to find or set preset for width: %i, height: %i", this, size.width(), size.height());
        return false;
    }

    return setPreset(preset);
}
273
// Maps a session preset to its pixel dimensions; returns an empty size for a
// null or unrecognized preset.
static IntSize sizeForPreset(NSString* preset)
{
    if (!preset)
        return { };

    // The preset globals are soft-linked and may be null on older systems, so
    // null entries are skipped rather than compared.
    const struct {
        NSString *name;
        IntSize size;
    } knownPresets[] = {
        { AVCaptureSessionPreset1280x720, { 1280, 720 } },
        { AVCaptureSessionPreset960x540, { 960, 540 } },
        { AVCaptureSessionPreset640x480, { 640, 480 } },
        { AVCaptureSessionPreset352x288, { 352, 288 } },
        { AVCaptureSessionPreset320x240, { 320, 240 } },
    };

    for (auto& entry : knownPresets) {
        if (entry.name && [preset isEqualToString:entry.name])
            return entry.size;
    }

    return { };
}
297
// Applies |preset| to the capture session. When no session exists yet the
// preset is stored in m_pendingPreset for setupCaptureSession() to apply later.
bool AVVideoCaptureSource::setPreset(NSString *preset)
{
    if (!session()) {
        m_pendingPreset = preset;
        return true;
    }

    auto size = sizeForPreset(preset);
    // Already capturing at the requested dimensions; nothing to reconfigure.
    if (size.width() == m_width && size.height() == m_height)
        return true;

    @try {
        session().sessionPreset = preset;
#if PLATFORM(MAC)
        // On Mac the video output's buffer dimensions must be kept in sync with
        // the preset explicitly.
        auto settingsDictionary = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(videoCaptureFormat), (NSString*)kCVPixelBufferWidthKey: @(size.width()), (NSString*)kCVPixelBufferHeightKey: @(size.height()), };
        [m_videoOutput setVideoSettings:settingsDictionary];
#endif
    } @catch(NSException *exception) {
        // AVFoundation raises when the preset is incompatible with the current
        // session configuration.
        LOG(Media, "AVVideoCaptureSource::applySize(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    return true;
}
322
// Configures the device's minimum frame duration from the best supported frame
// rate range containing |rate|. Returns false when the rate is unsupported or
// the device cannot be configured.
bool AVVideoCaptureSource::applyFrameRate(double rate)
{
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= [frameRateRange minFrameRate] && rate <= [frameRateRange maxFrameRate]) {
            // Among ranges containing |rate|, prefer the one with the largest
            // minimum frame duration — i.e. the lowest maximum frame rate that
            // still supports the requested rate.
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), frame rate %f not supported by video device", this, rate);
        return false;
    }

    NSError *error = nil;
    @try {
        // The device must be locked for configuration before mutating it.
        if ([device() lockForConfiguration:&error]) {
            [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p) - set frame rate range to %f", this, rate);
    return true;
}
357
// Applies the requested dimensions (via session preset) and then the frame
// rate. A null preset result is still passed to setPreset(), which records it
// as pending when no session exists yet.
void AVVideoCaptureSource::applySizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    setPreset(bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)));

    if (frameRate)
        applyFrameRate(frameRate.value());
}
365
// Maps an AVFoundation video orientation to the sensor rotation in degrees.
// The iOS and Mac mappings differ for portrait orientations — presumably
// reflecting how the camera sensor is mounted on each platform (confirm).
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // Unreachable for valid enum values, but keeps control from falling off the
    // end of a non-void function if an out-of-range value is passed.
    ASSERT_NOT_REACHED();
    return 0;
}
392
// Derives the sensor orientation (in degrees) from the output's video
// connection; returns 0 when no video connection exists.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* connection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    return connection ? sensorOrientation([connection videoOrientation]) : 0;
}
398
// Builds the AVFoundation capture graph — device input -> session -> video data
// output — then records the sensor orientation for rotation computation.
// Failures are logged and leave the session partially configured.
void AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS)
    // Registering as the active source mutes any previously active source.
    avVideoCaptureSourceFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return;
    }

    if (![session() canAddInput:videoIn.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:videoCaptureFormat], kCVPixelBufferPixelFormatTypeKey, nil]);
    if (m_pendingPreset) {
#if PLATFORM(MAC)
        // On Mac the output buffer dimensions are set explicitly from the
        // preset that was deferred before the session existed.
        auto size = sizeForPreset(m_pendingPreset.get());
        [settingsDictionary.get() setObject:[NSNumber numberWithInt:size.width()] forKey:(NSString*)kCVPixelBufferWidthKey];
        [settingsDictionary.get() setObject:[NSNumber numberWithInt:size.height()] forKey:(NSString*)kCVPixelBufferHeightKey];
#endif
    }

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // For a live stream, prefer dropping late frames over delivering them all.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    setVideoSampleBufferDelegate(m_videoOutput.get());

    if (![session() canAddOutput:m_videoOutput.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return;
    }
    [session() addOutput:m_videoOutput.get()];

#if PLATFORM(IOS)
    // Apply any preset requested via setPreset() before the session existed.
    setPreset(m_pendingPreset.get());
#endif

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();
}
445
// Drops buffered sample state and frame metrics so a subsequent session starts
// from scratch.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_lastImage = nullptr;
    m_videoFrameTimeStamps.clear();
    m_frameRate = 0;
    m_width = 0;
    m_height = 0;
}
455
// Re-estimates the observed frame rate from presentation timestamps kept in a
// sliding one-second window. Returns true when the smoothed estimate changed.
bool AVVideoCaptureSource::updateFramerate(CMSampleBufferRef sampleBuffer)
{
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!CMTIME_IS_NUMERIC(sampleTime))
        return false;

    Float64 frameTime = CMTimeGetSeconds(sampleTime);
    Float64 oneSecondAgo = frameTime - 1;
    
    m_videoFrameTimeStamps.append(frameTime);
    
    // Evict timestamps older than one second. The vector is never empty here
    // because the current timestamp was just appended.
    while (m_videoFrameTimeStamps[0] < oneSecondAgo)
        m_videoFrameTimeStamps.remove(0);

    Float64 frameRate = m_frameRate;
    // Average the previous estimate with the frames-in-last-second count to
    // smooth out jitter.
    m_frameRate = (m_frameRate + m_videoFrameTimeStamps.size()) / 2;

    return frameRate != m_frameRate;
}
475
// Starts observing device orientation changes (iOS only) and seeds the current
// orientation immediately so rotation is correct before the first change event.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
485
// OrientationNotifier callback. |orientation| is in degrees and must be a
// right-angle multiple.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
492
// Derives the per-sample rotation from the difference between the sensor's
// mounting orientation and the current device orientation. Both values are
// right-angle multiples, so the difference lies in {0, ±90, ±180, ±270}.
void AVVideoCaptureSource::computeSampleRotation()
{
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
    case -270: // -270 degrees is equivalent to +90.
        m_sampleRotation = MediaSample::VideoRotation::Left;
        break;
    case -90:
    case 270: // 270 degrees is equivalent to -90.
        m_sampleRotation = MediaSample::VideoRotation::Right;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
515
// Consumes a captured sample: refreshes the frame-rate estimate and cached
// dimensions, then forwards the sample with rotation/mirroring metadata to
// observers. Invoked via scheduleDeferredTask() from the capture callback.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer, RetainPtr<AVCaptureConnectionType> connection)
{
    // Ignore frames delivered when the session is not running, we want to hang onto the last image
    // delivered before it stopped.
    if (m_lastImage && (!isProducingData() || muted()))
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    updateFramerate(sampleBuffer.get());
    m_buffer = sampleBuffer;
    // Drop the cached still image now that a newer sample buffer exists.
    m_lastImage = nullptr;

    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    // A quarter-turn rotation swaps the reported width and height.
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        std::swap(dimensions.width, dimensions.height);

    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;

        settingsDidChange();
    }

    videoSampleAvailable(MediaSampleAVFObjC::create(m_buffer.get(), m_sampleRotation, [connection isVideoMirrored]));
}
544
// Capture-pipeline delegate callback: retain the sample buffer and its
// connection, then defer the actual processing via scheduleDeferredTask().
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    RetainPtr<CMSampleBufferRef> retainedBuffer = sampleBuffer;
    RetainPtr<AVCaptureConnectionType> retainedConnection = captureConnection;

    scheduleDeferredTask([this, retainedBuffer, retainedConnection] {
        processNewFrame(retainedBuffer, retainedConnection);
    });
}
554
// Finds the highest-resolution session preset matching the requested width
// and/or height. Returns nil when no dimensions are requested, when no preset
// matches, or when the matching preset is unsupported by the device.
NSString* AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional<int> width, std::optional<int> height) const
{
    if (!width && !height)
        return nil;

    // Candidates in descending resolution order; preset globals are soft-linked
    // and may be null on older systems.
    const struct {
        int width;
        int height;
        NSString *preset;
    } candidates[] = {
        { 1280, 720, AVCaptureSessionPreset1280x720 },
        { 960, 540, AVCaptureSessionPreset960x540 },
        { 640, 480, AVCaptureSessionPreset640x480 },
        { 352, 288, AVCaptureSessionPreset352x288 },
        { 320, 240, AVCaptureSessionPreset320x240 },
    };

    AVCaptureDeviceTypedef *videoDevice = device();
    for (auto& candidate : candidates) {
        if (width && width.value() != candidate.width)
            continue;
        if (height && height.value() != candidate.height)
            continue;
        if (!candidate.preset)
            continue;
        return [videoDevice supportsAVCaptureSessionPreset:candidate.preset] ? candidate.preset : nil;
    }

    return nil;
}
578
// Answers whether the device can satisfy the requested dimensions and frame
// rate; an absent constraint is trivially satisfied.
bool AVVideoCaptureSource::supportsSizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    if (!width && !height && !frameRate)
        return true;

    // Dimensions are supported only when some session preset matches them.
    if ((width || height) && !bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)))
        return false;

    if (!frameRate)
        return true;

    // Accept the rate if any supported range of the active format contains it,
    // compared at integer precision.
    int requestedRate = static_cast<int>(frameRate.value());
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        bool aboveMinimum = requestedRate >= static_cast<int>(range.minFrameRate);
        bool belowMaximum = requestedRate <= static_cast<int>(range.maxFrameRate);
        if (aboveMinimum && belowMaximum)
            return true;
    }

    return false;
}
598
599 } // namespace WebCore
600
601 #endif // ENABLE(MEDIA_STREAM)