17e0556d9c67810293ee289d1e9f02a7ecca204f
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "IntRect.h"
33 #import "Logging.h"
34 #import "MediaConstraints.h"
35 #import "MediaSampleAVFObjC.h"
36 #import "PlatformLayer.h"
37 #import "RealtimeMediaSourceCenterMac.h"
38 #import "RealtimeMediaSourceSettings.h"
39 #import <AVFoundation/AVCaptureDevice.h>
40 #import <AVFoundation/AVCaptureInput.h>
41 #import <AVFoundation/AVCaptureOutput.h>
42 #import <AVFoundation/AVCaptureSession.h>
43 #import <AVFoundation/AVError.h>
44 #import <objc/runtime.h>
45
46 #import <pal/cf/CoreMediaSoftLink.h>
47 #import "CoreVideoSoftLink.h"
48
49 typedef AVCaptureConnection AVCaptureConnectionType;
50 typedef AVCaptureDevice AVCaptureDeviceTypedef;
51 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
52 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
53 typedef AVCaptureOutput AVCaptureOutputType;
54 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
55 typedef AVFrameRateRange AVFrameRateRangeType;
56 typedef AVCaptureSession AVCaptureSessionType;
57
58 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
59
60 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
61 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
66 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
67 SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
68
69 #define AVCaptureConnection getAVCaptureConnectionClass()
70 #define AVCaptureDevice getAVCaptureDeviceClass()
71 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
72 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
73 #define AVCaptureOutput getAVCaptureOutputClass()
74 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
75 #define AVFrameRateRange getAVFrameRateRangeClass()
76
77 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
78 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
79 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset960x540, NSString *)
80 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
81 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
82 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset320x240, NSString *)
83
84 #define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
85 #define AVCaptureSessionPreset960x540 getAVCaptureSessionPreset960x540()
86 #define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
87 #define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
88 #define AVCaptureSessionPreset320x240 getAVCaptureSessionPreset320x240()
89
90 #if PLATFORM(IOS)
91 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset3840x2160, NSString *)
92 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVCaptureSessionPreset1920x1080, NSString *)
93 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
94 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
95 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
96 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
97 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)
98
99 #define AVCaptureSessionPreset3840x2160 getAVCaptureSessionPreset3840x2160()
100 #define AVCaptureSessionPreset1920x1080 getAVCaptureSessionPreset1920x1080()
101 #define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
102 #define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
103 #define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
104 #define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
105 #define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
106 #endif
107
108 using namespace WebCore;
109
// Bridges AVFoundation callbacks (sample-buffer delivery, KVO, and on iOS the
// session notifications) back into the C++ AVVideoCaptureSource.
// m_callback is a raw pointer: -disconnect must be invoked before the source is
// destroyed so late callbacks see null instead of a dangling pointer.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
#endif
@end
126
127 namespace WebCore {
128
129 #if PLATFORM(MAC)
130 const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8Planar;
131 #else
132 const OSType videoCaptureFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
133 #endif
134
// Process-wide serial queue on which video sample buffers are delivered (see
// -setSampleBufferDelegate:queue: in setupCaptureSession()). Created once and
// targeted at the high-priority global queue.
// NOTE(review): "globa" looks like a typo for "global"; renaming requires
// updating the call site in setupCaptureSession() as well.
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return globalQueue;
}
144
// Factory: looks up the AVCaptureDevice whose uniqueID matches `id` and wraps
// it in an AVVideoCaptureSource. Returns an empty CaptureSourceOrError when no
// such device exists. When constraints are supplied and applyConstraints()
// yields a result, result.value().first is returned instead of the source —
// presumably the name of the failing constraint; confirm against
// RealtimeMediaSource::applyConstraints().
CaptureSourceOrError AVVideoCaptureSource::create(const AtomicString& id, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!device)
        return { };

    auto source = adoptRef(*new AVVideoCaptureSource(device, id));
    if (constraints) {
        auto result = source->applyConstraints(*constraints);
        if (result)
            return WTFMove(result.value().first);
    }

    return CaptureSourceOrError(WTFMove(source));
}
160
// Constructs a source for `device` and records, in videoPresets(), the subset
// of fixed session presets (name -> pixel size) that both soft-linked
// successfully and are supported by this device. Also creates the Objective-C
// observer that forwards AVFoundation callbacks to this object.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : RealtimeMediaSource(id, Type::Video, device.localizedName)
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
    // Candidate presets, largest first. symbolAvailable is false when the
    // AVFoundation constant failed to soft-link on this OS version.
    struct VideoPreset {
        bool symbolAvailable;
        NSString* name;
        int width;
        int height;
    };

    static const VideoPreset presets[] = {
#if PLATFORM(IOS)
        { canLoadAVCaptureSessionPreset3840x2160(), AVCaptureSessionPreset3840x2160, 3840, 2160  },
        { canLoadAVCaptureSessionPreset1920x1080(), AVCaptureSessionPreset1920x1080, 1920, 1080 },
#endif
        { canLoadAVCaptureSessionPreset1280x720(), AVCaptureSessionPreset1280x720, 1280, 720 },
        { canLoadAVCaptureSessionPreset960x540(), AVCaptureSessionPreset960x540, 960, 540 },
        { canLoadAVCaptureSessionPreset640x480(), AVCaptureSessionPreset640x480, 640, 480 },
        { canLoadAVCaptureSessionPreset352x288(), AVCaptureSessionPreset352x288, 352, 288 },
        { canLoadAVCaptureSessionPreset320x240(), AVCaptureSessionPreset320x240, 320, 240 },
    };

    auto* presetsMap = &videoPresets();
    for (auto& preset : presets) {
        if (!preset.symbolAvailable || !preset.name || ![device supportsAVCaptureSessionPreset:preset.name])
            continue;

        presetsMap->add(String(preset.name), IntSize(preset.width, preset.height));
    }

#if PLATFORM(IOS)
    // captureSessionBeginInterruption() stores the raw AVFoundation reason value
    // directly into an InterruptionReason, so the enumerators must line up.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif

    setPersistentID(String(device.uniqueID));
}
202
// Tears down capture: detaches the observer first so any in-flight AVFoundation
// callbacks become no-ops, then removes the KVO registration added in
// setupSession() and stops the session if it is still running.
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS)
    RealtimeMediaSourceCenterMac::videoCaptureSourceFactory().unsetActiveSource(*this);
#endif
    [m_objcObserver disconnect];

    if (!m_session)
        return;

    // Must mirror the addObserver:forKeyPath:@"rate" call in setupSession().
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
    if ([m_session isRunning])
        [m_session stopRunning];

}
218
// Begins capture, lazily building the AVCaptureSession on first use. Does
// nothing when the session is already running; otherwise registers for session
// notifications and starts it.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
232
// Stops capture: removes notification observers, stops the session if running,
// and clears any recorded interruption. On iOS the session object itself is
// released so the next startProducingData() builds a fresh one.
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS)
    m_session = nullptr;
#endif
}
248
// Widens the [min, max] interval just enough to include `value`.
static void updateSizeMinMax(int& min, int& max, int value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
254
// Widens the [min, max] interval just enough to include `value`.
static void updateAspectRatioMinMax(double& min, double& max, double value)
{
    if (value < min)
        min = value;
    if (value > max)
        max = value;
}
260
// Opens an AVCaptureSession configuration transaction, if a session exists.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;

    [m_session beginConfiguration];
}
266
// Commits a pending AVCaptureSession configuration transaction, if a session
// exists. Pairs with beginConfiguration().
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;

    [m_session commitConfiguration];
}
272
// Invalidates the cached settings so the next settings() call recomputes them,
// then forwards the change notification to the base class.
void AVVideoCaptureSource::settingsDidChange()
{
    m_currentSettings = std::nullopt;
    RealtimeMediaSource::settingsDidChange();
}
278
// Returns (and lazily computes) the current settings for this source. The
// cache is invalidated by settingsDidChange().
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings() const
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings settings;

    // Map the physical camera position to a facing mode.
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    // Frame rate is the reciprocal of the maximum frame duration. Guard against
    // a zero `value` (e.g. an invalid/indefinite CMTime), which previously
    // divided by zero. NOTE(review): this is integer division, so fractional
    // frame rates (e.g. 29.97) are truncated — confirm whether that matters.
    auto maxFrameDuration = [device() activeVideoMaxFrameDuration];
    if (maxFrameDuration.value)
        settings.setFrameRate(maxFrameDuration.timescale / maxFrameDuration.value);

    settings.setWidth(m_width);
    settings.setHeight(m_height);
    settings.setDeviceId(id());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only meaningful when the device reports a position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    settings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(settings);

    return *m_currentSettings;
}
312
// Returns (and lazily computes) the device capabilities: facing modes, the
// union of frame-rate ranges across all formats, and the width/height/aspect
// ranges spanned by the supported presets.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities() const
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(id());
    AVCaptureDeviceTypedef *videoDevice = device();

    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    Float64 lowestFrameRateRange = std::numeric_limits<double>::infinity();
    Float64 highestFrameRateRange = 0;
    // FIX: std::numeric_limits<int>::infinity() returns 0 for integral types,
    // so the minima started at 0 and updateSizeMinMax() could never lower them.
    // Seed with max() so the first preset establishes the real minimum.
    int minimumWidth = std::numeric_limits<int>::max();
    int maximumWidth = 0;
    int minimumHeight = std::numeric_limits<int>::max();
    int maximumHeight = 0;
    double minimumAspectRatio = std::numeric_limits<double>::infinity();
    double maximumAspectRatio = 0;

    // Accumulate the extreme frame rates over every format the device offers.
    for (AVCaptureDeviceFormatType *format in [videoDevice formats]) {

        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges]) {
            lowestFrameRateRange = std::min<Float64>(lowestFrameRateRange, range.minFrameRate);
            highestFrameRateRange = std::max<Float64>(highestFrameRateRange, range.maxFrameRate);
        }
    }

    // Accumulate dimension and aspect-ratio extremes over the supported presets.
    for (auto& preset : m_supportedPresets) {
        auto values = preset.value;
        updateSizeMinMax(minimumWidth, maximumWidth, values.width());
        updateSizeMinMax(minimumHeight, maximumHeight, values.height());
        updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, static_cast<double>(values.width()) / values.height());
    }
    capabilities.setFrameRate(CapabilityValueOrRange(lowestFrameRateRange, highestFrameRateRange));
    capabilities.setWidth(CapabilityValueOrRange(minimumWidth, maximumWidth));
    capabilities.setHeight(CapabilityValueOrRange(minimumHeight, maximumHeight));
    capabilities.setAspectRatio(CapabilityValueOrRange(minimumAspectRatio, maximumAspectRatio));

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}
359
// Applies a target capture size by choosing the matching session preset.
// Returns false when no preset matches the dimensions or the session rejects it.
bool AVVideoCaptureSource::applySize(const IntSize& size)
{
    NSString *presetName = bestSessionPresetForVideoDimensions(size.width(), size.height());
    if (presetName && [session() canSetSessionPreset:presetName])
        return setPreset(presetName);

    LOG(Media, "AVVideoCaptureSource::applySize(%p), unable find or set preset for width: %i, height: %i", this, size.width(), size.height());
    return false;
}
370
// Maps a session-preset name to its pixel dimensions, or an empty size when the
// preset is nil or not in the supported-presets map.
IntSize AVVideoCaptureSource::sizeForPreset(NSString* preset)
{
    if (!preset)
        return { };

    auto& supported = videoPresets();
    auto entry = supported.find(String(preset));
    if (entry == supported.end())
        return { };

    return { entry->value.width(), entry->value.height() };
}
383
// Selects a session preset. If the session has not been created yet the preset
// is remembered in m_pendingPreset and applied later from setupCaptureSession().
// Returns false only when AVFoundation throws during reconfiguration.
bool AVVideoCaptureSource::setPreset(NSString *preset)
{
    if (!session()) {
        m_pendingPreset = preset;
        return true;
    }

    // An unknown preset yields an empty size; an unchanged size means there is
    // nothing to do.
    auto size = sizeForPreset(preset);
    if (m_presetSize == size)
        return true;

    m_presetSize = size;

    @try {
        session().sessionPreset = preset;
#if PLATFORM(MAC)
        // On Mac the video output's buffer dimensions must be kept in sync with
        // the preset explicitly.
        auto settingsDictionary = @{
            (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(videoCaptureFormat),
            (__bridge NSString *)kCVPixelBufferWidthKey: @(size.width()),
            (__bridge NSString *)kCVPixelBufferHeightKey: @(size.height())
        };
        [m_videoOutput setVideoSettings:settingsDictionary];
#endif
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applySize(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    return true;
}
414
// Applies the requested frame rate by locking the device and setting its
// min/max frame durations. Among the ranges containing `rate`, the one with
// the largest minFrameDuration (lowest maximum rate) is preferred. Returns
// false when the rate is unsupported or configuration fails.
bool AVVideoCaptureSource::applyFrameRate(double rate)
{
    using namespace PAL;
    double epsilon = 0.00001;
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate + epsilon >= [frameRateRange minFrameRate] && rate - epsilon <= [frameRateRange maxFrameRate]) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange || !isFrameRateSupported(rate)) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), frame rate %f not supported by video device", this, rate);
        return false;
    }

    NSError *error = nil;
    @try {
        if ([device() lockForConfiguration:&error]) {
            // A fixed-rate range: adopt its exact durations. Otherwise build a
            // duration of 1/rate seconds.
            // NOTE(review): CMTimeMake takes an int32 timescale, so a fractional
            // `rate` (e.g. 29.97) is truncated here; CMTimeMakeWithSeconds would
            // preserve it — confirm whether fractional rates are expected.
            if (bestFrameRateRange.minFrameRate == bestFrameRateRange.maxFrameRate) {
                [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
                [device() setActiveVideoMaxFrameDuration:[bestFrameRateRange maxFrameDuration]];
            } else {
                [device() setActiveVideoMinFrameDuration: CMTimeMake(1, rate)];
                [device() setActiveVideoMaxFrameDuration: CMTimeMake(1, rate)];
            }
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p) - set frame rate range to %f", this, rate);
    return true;
}
457
// Applies any requested dimensions (via preset selection) and/or frame rate.
// Either constraint may be absent; missing values leave that aspect untouched.
void AVVideoCaptureSource::applySizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    bool hasDimension = width || height;
    if (hasDimension)
        setPreset(bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)));

    if (frameRate)
        applyFrameRate(*frameRate);
}
466
// Maps an AVFoundation video orientation to a sensor rotation in degrees.
// NOTE(review): the iOS mapping differs from the Mac one by 180° for the
// portrait cases — presumably reflecting sensor mounting; confirm.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif

    // FIX: falling off the end of a non-void function is undefined behavior if
    // an out-of-range enum value ever arrives; treat it as "no rotation".
    return 0;
}
493
// Reads the sensor orientation from the output's video connection, or 0 when
// the output has no video connection.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* videoConnection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    if (!videoConnection)
        return 0;

    return sensorOrientation([videoConnection videoOrientation]);
}
499
// Lazily creates the AVCaptureSession, registers KVO on its @"rate" key
// (balanced by the removal in the destructor), and configures inputs/outputs
// inside a begin/commitConfiguration transaction. Calls captureFailed() when
// setup fails.
// NOTE(review): the KVO handler in the observer only acts on keyPath
// @"running"; observing @"rate" here looks inconsistent — confirm which key is
// intended.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
517
// Wires the capture pipeline: creates a device input for our camera, a video
// data output delivering to the shared capture queue, applies any preset chosen
// before the session existed, and records the sensor orientation. Returns
// false on any failure (caller reports captureFailed()).
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS)
    RealtimeMediaSourceCenterMac::videoCaptureSourceFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    // Request our canonical pixel format; on Mac also pin the buffer size to
    // the pending preset's dimensions (iOS sizes buffers from the preset alone).
    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:videoCaptureFormat], kCVPixelBufferPixelFormatTypeKey, nil]);
    if (m_pendingPreset) {
#if PLATFORM(MAC)
        auto size = sizeForPreset(m_pendingPreset.get());
        [settingsDictionary setObject:@(size.width()) forKey:(__bridge NSString *)kCVPixelBufferWidthKey];
        [settingsDictionary setObject:@(size.height()) forKey:(__bridge NSString *)kCVPixelBufferHeightKey];
#endif
    }

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Drop late frames rather than queueing them; samples are delivered on the
    // shared serial capture queue.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

#if PLATFORM(IOS)
    setPreset(m_pendingPreset.get());
#endif

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
566
// Releases the last retained sample buffer and resets the cached frame
// dimensions so the next frame reports a size change.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_width = 0;
    m_height = 0;
}
573
// Subscribes to device-orientation changes (iOS only) and applies the current
// orientation immediately. No-op on other platforms.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
583
// OrientationNotifier callback: records the new device orientation (expected
// to be a right-angle value) and recomputes the sample rotation.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
590
// Derives the rotation needed to display samples upright from the difference
// between sensor and device orientations. Both inputs are right angles, so the
// difference is a multiple of 90 in [-270, 270]; angles equivalent mod 360 are
// grouped. Front cameras mirror, so the 90° rotations swap direction.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
    case -270: // FIX: -270 ≡ 90 (mod 360); it was previously grouped with -90.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case 270: // FIX: 270 ≡ -90 (mod 360); it previously fell into the default and asserted.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
614
// Main-thread continuation for each captured frame (scheduled from the capture
// queue): caches the buffer, swaps reported width/height for 90° rotations,
// notifies on any dimension change, and delivers the sample downstream.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer, RetainPtr<AVCaptureConnectionType> connection)
{
    // Frames arriving after stop or while muted are dropped.
    if (!isProducingData() || muted())
        return;

    CMFormatDescriptionRef formatDescription = PAL::CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    m_buffer = sampleBuffer;
    CMVideoDimensions dimensions = PAL::CMVideoFormatDescriptionGetDimensions(formatDescription);
    // A quarter-turn rotation swaps the displayed width and height.
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        std::swap(dimensions.width, dimensions.height);

    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;

        settingsDidChange();
    }

    videoSampleAvailable(MediaSampleAVFObjC::create(m_buffer.get(), m_sampleRotation, [connection isVideoMirrored]));
}
638
// Capture-queue entry point for new frames: retains the buffer and connection,
// then defers the actual processing via scheduleDeferredTask().
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    RetainPtr<CMSampleBufferRef> retainedBuffer = sampleBuffer;
    RetainPtr<AVCaptureConnectionType> retainedConnection = captureConnection;

    scheduleDeferredTask([this, retainedBuffer, retainedConnection] {
        processNewFrame(retainedBuffer, retainedConnection);
    });
}
648
// Finds the first supported preset whose dimensions match the requested width
// and/or height; a missing (or zero) dimension matches anything. Returns nil
// when neither dimension is requested or no preset matches.
NSString* AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional<int> width, std::optional<int> height)
{
    if (!width && !height)
        return nil;

    int requestedWidth = width.value_or(0);
    int requestedHeight = height.value_or(0);

    for (auto& entry : videoPresets()) {
        auto presetSize = entry.value;
        bool widthMatches = !requestedWidth || requestedWidth == presetSize.width();
        bool heightMatches = !requestedHeight || requestedHeight == presetSize.height();
        if (widthMatches && heightMatches)
            return entry.key;
    }

    return nil;
}
667
// True when `frameRate` falls inside any frame-rate range of the device's
// active format, with a small tolerance for floating-point comparison.
bool AVVideoCaptureSource::isFrameRateSupported(double frameRate)
{
    constexpr double tolerance = 0.001;
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (frameRate + tolerance >= range.minFrameRate && frameRate - tolerance <= range.maxFrameRate)
            return true;
    }
    return false;
}
677
// Checks, without applying anything, whether the requested dimensions map to a
// supported preset and the requested frame rate is achievable. Absent
// constraints are trivially satisfied.
bool AVVideoCaptureSource::supportsSizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    if (!width && !height && !frameRate)
        return true;

    bool hasDimension = width || height;
    if (hasDimension && !bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)))
        return false;

    return !frameRate || isFrameRateSupported(*frameRate);
}
691
// KVO-driven notification that the session's running state changed, deferred
// to the main thread. The source is treated as muted while the session is not
// running; the change is ignored only when both the cached running flag and
// the muted state already agree with the new value.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        notifyMutedChange(!m_isRunning);
    });
}
702
// A source is interrupted when either an AVFoundation interruption is active
// or the base class reports one.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
710
711 #if PLATFORM(IOS)
// Handles AVCaptureSessionRuntimeErrorNotification. Only a media-services
// reset while we were running triggers a single restart attempt; all other
// runtime errors are ignored.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
721
// Records why the session was interrupted. The raw AVFoundation reason value
// from the notification's userInfo is stored directly as an InterruptionReason
// (the constructor static_asserts that the enumerators line up).
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
726
// Clears the recorded interruption. Only the side-by-side (multiple foreground
// apps) case triggers an explicit session restart here; other reasons are left
// alone — presumably the session resumes on its own, confirm against
// AVCaptureSession interruption semantics.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
738 #endif
739
740 } // namespace WebCore
741
742 @implementation WebCoreAVVideoCaptureSourceObserver
743
// Designated initializer; stores the (unowned) C++ source pointer.
- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
754
// Severs the link to the C++ source (called from its destructor). Afterwards
// every delegate/notification callback is a no-op.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}
761
// Registers for session runtime-error and interruption notifications, scoped
// to this source's session (iOS only; no-op elsewhere). Balanced by
// -removeNotificationObservers.
- (void)addNotificationObservers
{
#if PLATFORM(IOS)
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    AVCaptureSessionType* session = m_callback->session();

    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}
775
// Unregisters this object from all notification-center observations (iOS only;
// nothing was registered on other platforms).
- (void)removeNotificationObservers
{
#if PLATFORM(IOS)
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#endif
}
782
// AVCaptureVideoDataOutputSampleBufferDelegate: forwards each captured sample
// buffer to the C++ source, unless we have been disconnected.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (m_callback)
        m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}
790
// KVO callback. Forwards running-state changes to the C++ source.
// NOTE(review): setupSession() registers for keyPath @"rate" but this handler
// only acts on @"running" — one of the two strings looks wrong; confirm.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

#if !LOG_DISABLED
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if ([keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
}
815
816 #if PLATFORM(IOS)
// AVCaptureSessionRuntimeErrorNotification handler (iOS): extracts the NSError
// from the userInfo, logs it, and forwards it to the C++ source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}
825
// AVCaptureSessionWasInterruptedNotification handler (iOS): logs the reason
// and forwards the notification to the C++ source.
- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}
833
// AVCaptureSessionInterruptionEndedNotification handler (iOS): forwards the
// end-of-interruption to the C++ source.
- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
841 #endif
842
843 @end
844
845 #endif // ENABLE(MEDIA_STREAM)