[MediaStream iOS] Cleanup video muting/unmuting when tab visibility changes
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "AVCaptureDeviceManager.h"
32 #import "GraphicsContextCG.h"
33 #import "ImageBuffer.h"
34 #import "IntRect.h"
35 #import "Logging.h"
36 #import "MediaConstraints.h"
37 #import "MediaSampleAVFObjC.h"
38 #import "NotImplemented.h"
39 #import "PixelBufferConformerCV.h"
40 #import "PlatformLayer.h"
41 #import "RealtimeMediaSourceCenter.h"
42 #import "RealtimeMediaSourceSettings.h"
43 #import "WebActionDisablingCALayerDelegate.h"
44 #import <AVFoundation/AVCaptureDevice.h>
45 #import <AVFoundation/AVCaptureInput.h>
46 #import <AVFoundation/AVCaptureOutput.h>
47 #import <AVFoundation/AVCaptureSession.h>
48 #import <AVFoundation/AVCaptureVideoPreviewLayer.h>
49 #import <objc/runtime.h>
50
51 #if PLATFORM(IOS)
52 #include "WebCoreThread.h"
53 #include "WebCoreThreadRun.h"
54 #endif
55
56 #import "CoreMediaSoftLink.h"
57 #import "CoreVideoSoftLink.h"
58
// AVFoundation is soft-linked (below) rather than linked at build time, so the
// Objective-C classes are referenced through these typedefs and resolved at runtime.
typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceTypedef;
typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
typedef AVFrameRateRange AVFrameRateRangeType;
typedef AVCaptureVideoPreviewLayer AVCaptureVideoPreviewLayerType;

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

// Runtime lookups for the AVFoundation classes used by this file.
SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoPreviewLayer)
SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)

// Let the rest of the file use the framework class names directly; each name
// expands to the soft-linked getter above.
#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
#define AVCaptureVideoPreviewLayer getAVCaptureVideoPreviewLayerClass()
#define AVFrameRateRange getAVFrameRateRangeClass()

// Session-preset string constants are optional: a getter returns null when the
// constant is unavailable in the loaded framework, so every use below nil-checks first.
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset960x540, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPreset320x240, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionPresetLow, NSString *)

#define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
#define AVCaptureSessionPreset960x540 getAVCaptureSessionPreset960x540()
#define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
#define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
#define AVCaptureSessionPreset320x240 getAVCaptureSessionPreset320x240()
#define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()
102
103 using namespace WebCore;
104
105 namespace WebCore {
106
// Pixel format requested from the video data output.
#if PLATFORM(MAC)
// Planar 4:2:0 YCbCr on Mac.
const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8Planar;
#else
// Bi-planar full-range 4:2:0 YCbCr elsewhere (iOS).
const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
#endif
112
// Factory for AVFoundation-backed video capture sources. On iOS it also mixes in
// SingleSourceFactory, which tracks the one source allowed to be active at a time.
class AVVideoCaptureSourceFactory : public RealtimeMediaSource::VideoCaptureFactory
#if PLATFORM(IOS)
    , public RealtimeMediaSource::SingleSourceFactory<AVVideoCaptureSource>
#endif
{
public:
    // Returns an empty result when no capture device matches deviceID;
    // otherwise defers to AVVideoCaptureSource::create for constraint handling.
    CaptureSourceOrError createVideoCaptureSource(const String& deviceID, const MediaConstraints* constraints) final {
        AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:deviceID];
        if (!device)
            return { };
        return AVVideoCaptureSource::create(device, deviceID, constraints);
    }

#if PLATFORM(IOS)
private:
    // Mutes/unmutes the active source when page (tab) visibility changes.
    // NOTE(review): presumably overrides a VideoCaptureFactory hook declared in
    // RealtimeMediaSource.h — confirm; no override keyword is visible here.
    void setVideoCaptureMutedForPageVisibility(bool shouldMute)
    {
        if (activeSource())
            activeSource()->setMuted(shouldMute);
    }
#endif
};
135
// Creates a video capture source for `device`, applying `constraints` when given.
// A non-empty applyConstraints() result is a failure; the name of the violated
// constraint is returned instead of a source.
CaptureSourceOrError AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints)
{
    auto source = adoptRef(*new AVVideoCaptureSource(device, id));
    if (!constraints)
        return CaptureSourceOrError(WTFMove(source));

    auto result = source->applyConstraints(*constraints);
    if (result)
        return WTFMove(result.value().first);

    return CaptureSourceOrError(WTFMove(source));
}
147
// Process-wide factory singleton; NeverDestroyed avoids exit-time destruction.
static AVVideoCaptureSourceFactory& avVideoCaptureSourceFactory()
{
    static NeverDestroyed<AVVideoCaptureSourceFactory> factory;
    return factory.get();
}
153
// Exposes the shared factory through the RealtimeMediaSource interface.
RealtimeMediaSource::VideoCaptureFactory& AVVideoCaptureSource::factory()
{
    return avVideoCaptureSourceFactory();
}
158
// All real setup happens lazily in setupCaptureSession(); the constructor only
// forwards the device and identifier to the base class.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : AVMediaCaptureSource(device, id, Type::Video)
{
}
163
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS)
    // Clear the factory's single-active-source tracking so a new source may start.
    avVideoCaptureSourceFactory().unsetActiveSource(*this);
#endif
}
170
// Grow the [min, max] interval so that it contains `value`.
static void updateSizeMinMax(int& min, int& max, int value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
176
// Grow the [min, max] interval so that it contains `value`.
static void updateAspectRatioMinMax(double& min, double& max, double value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
182
// Computes the capability ranges (facing mode, frame rate, width/height, aspect
// ratio) advertised for this capture device.
void AVVideoCaptureSource::initializeCapabilities(RealtimeMediaSourceCapabilities& capabilities)
{
    AVCaptureDeviceTypedef *videoDevice = device();

    // Facing mode is derived from the physical position of the capture device.
    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    Float64 lowestFrameRateRange = std::numeric_limits<double>::infinity();
    Float64 highestFrameRateRange = 0;
    // std::numeric_limits<int>::infinity() evaluates to 0 for integral types, so
    // the previous code started the minimums at 0 and they never updated. Start
    // at max() so the first observed dimension establishes the minimum.
    int minimumWidth = std::numeric_limits<int>::max();
    int maximumWidth = 0;
    int minimumHeight = std::numeric_limits<int>::max();
    int maximumHeight = 0;
    double minimumAspectRatio = std::numeric_limits<double>::infinity();
    double maximumAspectRatio = 0;

    for (AVCaptureDeviceFormatType *format in [videoDevice formats]) {

        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges]) {
            lowestFrameRateRange = std::min<Float64>(lowestFrameRateRange, range.minFrameRate);
            highestFrameRateRange = std::max<Float64>(highestFrameRateRange, range.maxFrameRate);
        }

        // NOTE: the preset checks below do not depend on `format`, so they are
        // redundant after the first iteration. They are kept inside the loop so
        // the size capabilities stay empty for a device reporting no formats.
        // Aspect ratios previously used integer division (e.g. 960 / 540 == 1);
        // floating-point literals record the true ratios.
        if (AVCaptureSessionPreset1280x720 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 1280);
            updateSizeMinMax(minimumHeight, maximumHeight, 720);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 1280.0 / 720);
        }
        if (AVCaptureSessionPreset960x540 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 960);
            updateSizeMinMax(minimumHeight, maximumHeight, 540);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 960.0 / 540);
        }
        if (AVCaptureSessionPreset640x480 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 640);
            updateSizeMinMax(minimumHeight, maximumHeight, 480);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 640.0 / 480);
        }
        if (AVCaptureSessionPreset352x288 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 352);
            updateSizeMinMax(minimumHeight, maximumHeight, 288);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 352.0 / 288);
        }
        if (AVCaptureSessionPreset320x240 && [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 320);
            updateSizeMinMax(minimumHeight, maximumHeight, 240);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 320.0 / 240);
        }
    }

    capabilities.setFrameRate(CapabilityValueOrRange(lowestFrameRateRange, highestFrameRateRange));
    capabilities.setWidth(CapabilityValueOrRange(minimumWidth, maximumWidth));
    capabilities.setHeight(CapabilityValueOrRange(minimumHeight, maximumHeight));
    capabilities.setAspectRatio(CapabilityValueOrRange(minimumAspectRatio, maximumAspectRatio));
}
240
// Declares which constraints this source can satisfy.
void AVVideoCaptureSource::initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints& supportedConstraints)
{
    // Size, aspect ratio and frame rate are always constrainable for video.
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    // Facing mode is only meaningful when the device reports a physical position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
}
249
// Fills `settings` with the current device identity, facing mode, and the
// frame dimensions/rate observed from delivered samples.
void AVVideoCaptureSource::updateSettings(RealtimeMediaSourceSettings& settings)
{
    settings.setDeviceId(id());

    auto position = [device() position];
    if (position == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if (position == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(m_frameRate);
    settings.setWidth(m_width);
    settings.setHeight(m_height);

    // m_width/m_height are 0 until the first frame arrives (and after
    // shutdownCaptureSession()); guard the division so we never report an
    // infinite/NaN aspect ratio.
    settings.setAspectRatio(m_height ? static_cast<float>(m_width) / m_height : 0);
}
266
// Applies `size` by selecting the matching AVCaptureSession preset.
// Returns false when no preset matches or the session rejects it.
bool AVVideoCaptureSource::applySize(const IntSize& size)
{
    NSString *preset = bestSessionPresetForVideoDimensions(size.width(), size.height());
    if (!preset || ![session() canSetSessionPreset:preset]) {
        LOG(Media, "AVVideoCaptureSource::applySize(%p), unable to find or set preset for width: %i, height: %i", this, size.width(), size.height());
        return false;
    }

    return setPreset(preset);
}
277
// Returns the pixel dimensions for a known session preset, or an empty size for
// nil/unrecognized presets.
static IntSize sizeForPreset(NSString* preset)
{
    if (!preset)
        return { };

    // Each soft-linked preset getter can return nil when the constant is
    // unavailable, so entries are nil-checked before comparison.
    struct PresetInfo {
        NSString *name;
        IntSize size;
    };
    const PresetInfo knownPresets[] = {
        { AVCaptureSessionPreset1280x720, { 1280, 720 } },
        { AVCaptureSessionPreset960x540, { 960, 540 } },
        { AVCaptureSessionPreset640x480, { 640, 480 } },
        { AVCaptureSessionPreset352x288, { 352, 288 } },
        { AVCaptureSessionPreset320x240, { 320, 240 } },
    };

    for (auto& entry : knownPresets) {
        if (entry.name && [preset isEqualToString:entry.name])
            return entry.size;
    }

    return { };
}
301
// Applies `preset` to the capture session. When no session exists yet, the
// preset is remembered and applied later from setupCaptureSession().
bool AVVideoCaptureSource::setPreset(NSString *preset)
{
    if (!session()) {
        m_pendingPreset = preset;
        return true;
    }

    // Nothing to do if the preset's dimensions already match the current frame size.
    auto size = sizeForPreset(preset);
    if (size.width() == m_width && size.height() == m_height)
        return true;

    @try {
        session().sessionPreset = preset;
#if PLATFORM(MAC)
        // On Mac, the video output's buffer dimensions must be updated to match
        // the new preset explicitly.
        auto settingsDictionary = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(videoCaptureFormat), (NSString*)kCVPixelBufferWidthKey: @(size.width()), (NSString*)kCVPixelBufferHeightKey: @(size.height()), };
        [m_videoOutput setVideoSettings:settingsDictionary];
#endif
    } @catch(NSException *exception) {
        // Log message previously misidentified this function as applySize().
        LOG(Media, "AVVideoCaptureSource::setPreset(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    return true;
}
326
// Applies `rate` by setting the device's minimum frame duration from a
// supported frame-rate range containing the rate. Returns false when the rate
// is unsupported or the device cannot be locked/configured.
bool AVVideoCaptureSource::applyFrameRate(double rate)
{
    // When several ranges contain `rate`, this keeps the candidate with the
    // larger minFrameDuration (i.e. the lower maximum frame rate).
    // NOTE(review): confirm that preference is intentional and not an inverted
    // CMTIME comparison.
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= [frameRateRange minFrameRate] && rate <= [frameRateRange maxFrameRate]) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), frame rate %f not supported by video device", this, rate);
        return false;
    }

    // Configuration requires holding the device lock; failures are reported via
    // `error` rather than a thrown exception, but AVFoundation may still throw.
    NSError *error = nil;
    @try {
        if ([device() lockForConfiguration:&error]) {
            [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p) - set frame rate range to %f", this, rate);
    return true;
}
361
// Applies any combination of size and frame-rate constraints. Dimensions are
// resolved to a session preset (nil when unconstrained or unsupported).
void AVVideoCaptureSource::applySizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    setPreset(bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)));

    if (!frameRate)
        return;

    applyFrameRate(*frameRate);
}
369
// Maps an AVCaptureVideoOrientation to a rotation in degrees. The mapping
// differs between iOS and other platforms (portrait is 180 on iOS, 0 elsewhere).
// NOTE(review): presumably this accounts for how the camera sensor is mounted
// on iOS hardware — confirm against the rotation handling in computeSampleRotation().
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
}
396
// Reads the sensor orientation from the output's video connection; 0 when the
// output has no video connection.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* connection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    if (!connection)
        return 0;

    return sensorOrientation([connection videoOrientation]);
}
402
// Builds the AVCaptureSession graph: adds the device input and a video data
// output configured with our pixel format, applies any pending preset, and
// records the sensor orientation for sample rotation.
void AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS)
    // Register as the single active video capture source on iOS.
    avVideoCaptureSourceFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return;
    }

    if (![session() canAddInput:videoIn.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:videoCaptureFormat], kCVPixelBufferPixelFormatTypeKey, nil]);
    if (m_pendingPreset) {
#if PLATFORM(MAC)
        // On Mac the output buffer dimensions must match the pending preset.
        auto size = sizeForPreset(m_pendingPreset.get());
        [settingsDictionary.get() setObject:[NSNumber numberWithInt:size.width()] forKey:(NSString*)kCVPixelBufferWidthKey];
        [settingsDictionary.get() setObject:[NSNumber numberWithInt:size.height()] forKey:(NSString*)kCVPixelBufferHeightKey];
#endif
    }

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Drop late frames rather than queueing them, keeping latency low.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    setVideoSampleBufferDelegate(m_videoOutput.get());

    if (![session() canAddOutput:m_videoOutput.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return;
    }
    [session() addOutput:m_videoOutput.get()];

#if PLATFORM(IOS)
    // Apply any preset requested before the session existed (see setPreset()).
    setPreset(m_pendingPreset.get());
#endif

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();
}
449
// Releases retained sample data and resets the state derived from delivered
// frames (timestamp window, rate, dimensions).
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_lastImage = nullptr;
    m_videoFrameTimeStamps.clear();

    m_width = 0;
    m_height = 0;
    m_frameRate = 0;
}
459
// Re-estimates the observed frame rate from a one-second sliding window of
// sample timestamps. Returns true when the estimate changed.
bool AVVideoCaptureSource::updateFramerate(CMSampleBufferRef sampleBuffer)
{
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!CMTIME_IS_NUMERIC(presentationTime))
        return false;

    Float64 frameTime = CMTimeGetSeconds(presentationTime);
    m_videoFrameTimeStamps.append(frameTime);

    // Trim timestamps older than one second before this frame. The frame just
    // appended is always within the window, so the list never empties.
    Float64 windowStart = frameTime - 1;
    while (m_videoFrameTimeStamps[0] < windowStart)
        m_videoFrameTimeStamps.remove(0);

    // Smooth by averaging the window's frame count with the previous estimate.
    Float64 previousFrameRate = m_frameRate;
    m_frameRate = (m_frameRate + m_videoFrameTimeStamps.size()) / 2;

    return previousFrameRate != m_frameRate;
}
479
// Registers for device-orientation notifications (iOS only) and immediately
// seeds the current orientation so sample rotation starts out correct.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
489
// Orientation-notifier callback (see monitorOrientation()); `orientation` is in
// degrees and limited to the four cardinal rotations.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
496
// Derives the rotation to apply to delivered samples from the difference
// between the sensor orientation and the current device orientation.
void AVVideoCaptureSource::computeSampleRotation()
{
    // Normalize the delta into [0, 360). Both orientations take values in
    // {0, 90, -90, 180}, so the raw delta can be any multiple of 90 in
    // [-270, 270]. The previous switch asserted on a delta of 270 (sensor 180,
    // device -90) and mapped -270 to Right even though -270 ≡ 90 (Left).
    int rotation = ((m_sensorOrientation - m_deviceOrientation) % 360 + 360) % 360;
    switch (rotation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 90:
        m_sampleRotation = MediaSample::VideoRotation::Left;
        break;
    case 180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 270:
        m_sampleRotation = MediaSample::VideoRotation::Right;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
519
// Handles one captured sample on the WebCore side: updates the frame-rate
// estimate and reported dimensions, then forwards the sample (with rotation
// and mirroring metadata) to observers.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer, RetainPtr<AVCaptureConnectionType> connection)
{
    // Ignore frames delivered when the session is not running, we want to hang onto the last image
    // delivered before it stopped. Muted sources likewise keep the last image.
    if (m_lastImage && (!isProducingData() || muted()))
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    updateFramerate(sampleBuffer.get());
    m_buffer = sampleBuffer;
    m_lastImage = nullptr;

    // A quarter-turn rotation swaps the presented width and height.
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        std::swap(dimensions.width, dimensions.height);

    // Notify observers when the frame size changes (feeds updateSettings()).
    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;

        settingsDidChange();
    }

    videoSampleAvailable(MediaSampleAVFObjC::create(m_buffer.get(), m_sampleRotation, [connection isVideoMirrored]));
}
548
// AVCaptureVideoDataOutput delegate entry point. The sample buffer and
// connection are retained in the lambda captures so they outlive this call
// while processNewFrame() runs as a deferred task.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    scheduleDeferredTask([this, buffer = RetainPtr<CMSampleBufferRef>(sampleBuffer), connection = RetainPtr<AVCaptureConnectionType>(captureConnection)] {
        processNewFrame(buffer, connection);
    });
}
558
// Returns the session preset matching the requested dimensions, nil when the
// request is unconstrained, unrecognized, or unsupported by the device.
NSString* AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional<int> width, std::optional<int> height) const
{
    if (!width && !height)
        return nil;

    AVCaptureDeviceTypedef *videoDevice = device();

    // True when each requested dimension (if any) equals the preset's dimension.
    auto matches = [&](int presetWidth, int presetHeight) {
        return (!width || width.value() == presetWidth) && (!height || height.value() == presetHeight);
    };
    // The preset when the device supports it, otherwise nil.
    auto ifSupported = [&](NSString *preset) -> NSString * {
        return [videoDevice supportsAVCaptureSessionPreset:preset] ? preset : nil;
    };

    if (AVCaptureSessionPreset1280x720 && matches(1280, 720))
        return ifSupported(AVCaptureSessionPreset1280x720);

    if (AVCaptureSessionPreset960x540 && matches(960, 540))
        return ifSupported(AVCaptureSessionPreset960x540);

    if (AVCaptureSessionPreset640x480 && matches(640, 480))
        return ifSupported(AVCaptureSessionPreset640x480);

    if (AVCaptureSessionPreset352x288 && matches(352, 288))
        return ifSupported(AVCaptureSessionPreset352x288);

    if (AVCaptureSessionPreset320x240 && matches(320, 240))
        return ifSupported(AVCaptureSessionPreset320x240);

    return nil;
}
582
// Reports whether the device can satisfy the given size and frame-rate
// constraints; absent constraints are trivially satisfied.
bool AVVideoCaptureSource::supportsSizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    if (!width && !height && !frameRate)
        return true;

    // A size constraint must map onto a session preset the device supports.
    if ((width || height) && !bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)))
        return false;

    if (!frameRate)
        return true;

    // The requested rate must fall within one of the active format's ranges.
    int requestedRate = static_cast<int>(frameRate.value());
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (requestedRate >= static_cast<int>(range.minFrameRate) && requestedRate <= static_cast<int>(range.maxFrameRate))
            return true;
    }

    return false;
}
602
603 } // namespace WebCore
604
605 #endif // ENABLE(MEDIA_STREAM)