9a661c793e40bb11afde50634a5630264e0554b3
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenter.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
51 typedef AVCaptureConnection AVCaptureConnectionType;
52 typedef AVCaptureDevice AVCaptureDeviceTypedef;
53 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
54 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
55 typedef AVCaptureOutput AVCaptureOutputType;
56 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
57 typedef AVFrameRateRange AVFrameRateRangeType;
58 typedef AVCaptureSession AVCaptureSessionType;
59
60 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
61
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
66 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
67 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
68 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
69 SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
70
71 #define AVCaptureConnection getAVCaptureConnectionClass()
72 #define AVCaptureDevice getAVCaptureDeviceClass()
73 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
74 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
75 #define AVCaptureOutput getAVCaptureOutputClass()
76 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
77 #define AVFrameRateRange getAVFrameRateRangeClass()
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
80
81 SOFT_LINK_CONSTANT(AVFoundation, AVCaptureDeviceWasDisconnectedNotification, NSString *)
82 #define AVCaptureDeviceWasDisconnectedNotification getAVCaptureDeviceWasDisconnectedNotification()
83
84 #if PLATFORM(IOS_FAMILY)
85 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
86 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
87 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
88 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
89 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)
90
91 #define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
92 #define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
93 #define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
94 #define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
95 #define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
96 #endif
97
98 using namespace WebCore;
99 using namespace PAL;
100
// Objective-C adapter that forwards AVFoundation callbacks — sample buffers,
// KVO changes on "running"/"suspended", and (on iOS) session notifications —
// to the owning C++ AVVideoCaptureSource. m_callback is a raw pointer cleared
// by -disconnect before the source is destroyed, so every forwarding method
// nil-checks it first.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
-(void)deviceConnectedDidChange:(NSNotification*)notification;
#endif
@end
118
119 namespace WebCore {
120
// Pixel format requested from the AVCaptureVideoDataOutput. On macOS a planar
// 4:2:0 format is hard-coded as a workaround (see FIXME below); on other
// platforms the process-wide preferred format is used.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if PLATFORM(MAC)
    return kCVPixelFormatType_420YpCbCr8Planar;
#else
    return preferedPixelBufferFormat();
#endif
}
130
// Returns the lazily-created serial queue on which AVFoundation delivers video
// sample buffers to the observer (see setupCaptureSession()). The queue
// targets the high-priority global queue.
// NOTE(review): "globa" is a misspelling of "global"; renaming would also
// require updating the caller in setupCaptureSession().
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return globalQueue;
}
140
// VideoPreset specialization that remembers the AVCaptureDeviceFormat it was
// built from, so setSizeAndFrameRateWithPreset() can re-apply the native
// format with -setActiveFormat: later.
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // Native capture format backing this preset; retained for the preset's lifetime.
    RetainPtr<AVCaptureDeviceFormatType> format;
};
156
// Creates a capture source for the AVCaptureDevice with the given unique ID
// and applies any initial constraints. Returns an empty result if no such
// device exists, or the failing constraint's name if constraints cannot be
// satisfied.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!device)
        return { };

    auto source = adoptRef(*new AVVideoCaptureSource(device, WTFMove(id), WTFMove(hashSalt)));
    if (constraints) {
        auto result = source->applyConstraints(*constraints);
        // A non-empty result means a constraint failed; report which one.
        if (result)
            return WTFMove(result.value().badConstraint);
    }

    return CaptureSourceOrError(WTFMove(source));
}
172
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, String&& id, String&& hashSalt)
    : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // InterruptionReason values are stored directly from
    // AVCaptureSessionInterruptionReasonKey (see captureSessionBeginInterruption),
    // so the two enums must stay numerically in sync.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif

    // Observe device suspension (e.g. a closed clamshell lid); balanced by the
    // removeObserver call in the destructor.
    [m_device.get() addObserver:m_objcObserver.get() forKeyPath:@"suspended" options:NSKeyValueObservingOptionNew context:(void *)nil];
}
187
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().unsetActiveSource(*this);
#endif
    // Disconnect the observer first so no callback can reach this object while
    // it is being torn down, then balance the "suspended" KVO registration
    // made in the constructor.
    [m_objcObserver disconnect];
    [m_device removeObserver:m_objcObserver.get() forKeyPath:@"suspended"];

    if (!m_session)
        return;

    if ([m_session isRunning])
        [m_session stopRunning];

    // Removes the "running" KVO registration and drops the session.
    clearSession();
}
204
// Lazily creates the AVCaptureSession and starts observing its "running"
// state. The registration is balanced by clearSession().
void AVVideoCaptureSource::initializeSession()
{
    ASSERT(!m_session);

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    [m_session.get() addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:nullptr];
}
211
// Stops observing the session's "running" state and releases the session.
// Counterpart of initializeSession().
void AVVideoCaptureSource::clearSession()
{
    ASSERT(m_session);

    [m_session.get() removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    m_session = nullptr;
}
218
// Starts capture, creating and configuring the session on first use. A no-op
// when the session is already running.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    // Register for session notifications before starting so interruption and
    // runtime-error events during startup are observed.
    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
232
// Stops capture and resets interruption state. On iOS the session is torn
// down entirely and will be rebuilt by the next startProducingData() call.
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    // Unregister notification observers first so the shutdown transition does
    // not trigger the interruption/error handlers.
    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    clearSession();
#endif
}
248
// Opens an AVCaptureSession configuration transaction, if a session exists.
// Must be balanced by commitConfiguration().
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;
    [m_session beginConfiguration];
}
254
// Commits a configuration transaction opened by beginConfiguration(), if a
// session exists.
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;
    [m_session commitConfiguration];
}
260
// Invalidates the cached settings; they are recomputed lazily by settings().
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = WTF::nullopt;
}
265
// Returns the current MediaStreamTrack settings, recomputing and caching them
// if settingsDidChange() invalidated the cache.
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings settings;
    // Map the camera's physical position to the facing-mode setting.
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(frameRate());

    // Samples rotated by 90° are delivered with width/height swapped, so
    // report the transposed size.
    auto size = this->size();
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        size = size.transposedSize();
    
    settings.setWidth(size.width());
    settings.setHeight(size.height());
    settings.setDeviceId(hashedId());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only constrainable when the device reports a position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    settings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(settings);

    return *m_currentSettings;
}
303
// Returns the device capabilities, computing and caching them on first use.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(hashedId());

    // Advertise the facing mode(s) matching the camera's physical position.
    AVCaptureDeviceTypedef *videoDevice = device();
    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    // Base class fills in size/frame-rate ranges from the supported presets.
    updateCapabilities(capabilities);

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}
324
// Preset-selection hook: on iOS only binned formats are preferred (the return
// value is the format's isVideoBinned flag); on other platforms every preset
// is acceptable.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}
335
// Applies the given preset and frame rate to the capture device. If the
// session does not exist yet, the request is stashed and replayed by
// setupCaptureSession(). All device changes happen inside a
// beginConfiguration/commitConfiguration pair while holding the device's
// configuration lock.
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    // No session yet: remember the request for when the session is created.
    if (!m_session) {
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    NSError *error = nil;
    [m_session beginConfiguration];
    // AVFoundation can raise Objective-C exceptions here; catch them so a
    // misbehaving device cannot crash the process.
    @try {
        if ([device() lockForConfiguration:&error]) {
            // Only switch the active format when it actually changes.
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];
#if PLATFORM(MAC)
                // On macOS the output must be told the new dimensions and
                // pixel format explicitly.
                auto settingsDictionary = @{
                    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                    (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                    (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height()),
                    (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ }
                };
                [m_videoOutput setVideoSettings:settingsDictionary];
#endif
            }
            [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
            [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - exception thrown configuring device: <%s> %s", [[exception name] UTF8String], [[exception reason] UTF8String]);
    }
    [m_session commitConfiguration];

    m_currentPreset = avPreset;

    if (error)
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - failed to lock video device for configuration: %s", [[error localizedDescription] UTF8String]);
}
390
// Maps an AVCaptureVideoOrientation to the sensor rotation in degrees used by
// computeSampleRotation(). The mapping differs between iOS and macOS because
// the sensor's mounting relative to the default orientation differs.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // The switches above cover every enumerator (no "default:" so the compiler
    // can warn if new cases appear). Previously an out-of-range raw value made
    // control fall off the end of this non-void function, which is undefined
    // behavior; treat it as "no rotation" instead.
    ASSERT_NOT_REACHED();
    return 0;
}
417
// Reads the sensor orientation from the output's video connection; returns 0
// when no video connection is available.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    auto* connection = [videoOutput connectionWithMediaType:getAVMediaTypeVideo()];
    if (!connection)
        return 0;
    return sensorOrientation([connection videoOrientation]);
}
423
// Creates and configures the capture session on first use. Returns true on
// success; on failure the source is marked as failed via captureFailed().
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    initializeSession();

    // Batch all input/output setup into one configuration transaction.
    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
440
// Finds, among the active format's supported ranges that contain `rate`, the
// range with the largest minimum frame duration. Returns nil (and logs) when
// no supported range contains the requested rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *best = nil;
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (!frameRateRangeIncludesRate({ [range minFrameRate], [range maxFrameRate] }, rate))
            continue;
        if (!best || CMTIME_COMPARE_INLINE([range minFrameDuration], >, [best minFrameDuration]))
            best = range;
    }

    if (!best)
        RELEASE_LOG(Media, "AVVideoCaptureSource::frameDurationForFrameRate, no frame rate range for rate %g", rate);

    return best;
}
456
// Wires the device input and a video-data output into the session, replays
// any size/frame-rate request made before the session existed, and records
// the sensor orientation. Returns false if the input or output cannot be
// attached. Caller wraps this in a beginConfiguration/commitConfiguration
// transaction (see setupSession()).
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    // Output delivering CVPixelBuffer-backed samples in our preferred format,
    // dropping late frames, on the shared capture queue.
    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Apply any preset/frame-rate request that arrived before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
497
// Drops the last-delivered sample. The session itself is torn down by
// stopProducingData() / the destructor.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
}
502
// Starts tracking device-orientation changes (iOS only) and seeds the current
// orientation immediately. No-op on other platforms.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
512
// OrientationNotifier callback: records the new device orientation (in
// degrees) and refreshes the rotation applied to delivered samples.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
519
// Derives the rotation to apply to delivered samples from the difference
// between the sensor orientation and the current device orientation. Front
// cameras mirror the 90°/-90° cases. Notifies settings observers only when
// the rotation actually changes (width/height swap when transposed).
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    MediaSample::VideoRotation sampleRotation;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case -270:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        // Both inputs are multiples of 90 in [-90, 180], so only the
        // differences handled above should occur.
        ASSERT_NOT_REACHED();
        sampleRotation = MediaSample::VideoRotation::None;
    }
    if (sampleRotation == m_sampleRotation)
        return;

    m_sampleRotation = sampleRotation;
    notifySettingsDidChangeObservers({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height });
}
549
// Delivers one captured frame to observers, keeping a reference to the most
// recent sample and tracking the intrinsic size. Frames are discarded while
// the source is stopped or muted.
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    if (!isProducingData() || muted())
        return;

    m_buffer = &sample.get();
    setIntrinsicSize(expandedIntSize(sample->presentationSize()));
    dispatchMediaSampleToObservers(WTFMove(sample));
}
559
// Capture-queue callback for each frame from AVFoundation. Drops the first
// few frames after the session starts (they can be black/garbage while the
// pipeline warms up; m_framesToDropAtStartup is reset by
// captureSessionIsRunningDidChange), then wraps the buffer with the current
// rotation/mirroring and hands it off via a deferred task.
// NOTE(review): runs on globaVideoCaptureSerialQueue(); scheduleDeferredTask
// presumably rejoins the main thread — confirm in RealtimeMediaSource.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
        return;

    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
570
// KVO handler for the session's "running" state. Mirrors the running state
// into the muted state (not running => muted) and re-arms the startup
// frame-drop counter whenever capture (re)starts.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        // Skip only when neither the recorded running state nor the muted
        // state would change.
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        if (m_isRunning)
            m_framesToDropAtStartup = 4;

        notifyMutedChange(!m_isRunning);
    });
}
584
// KVO handler for the device's "suspended" state (e.g. a built-in camera
// behind a closed laptop lid). Mirrors suspension into the muted state.
// Compiled out on iOS, where interruption notifications cover this instead.
void AVVideoCaptureSource::captureDeviceSuspendedDidChange()
{
#if !PLATFORM(IOS_FAMILY)
    scheduleDeferredTask([this] {
        auto isSuspended = [m_device isSuspended];
        if (isSuspended == muted())
            return;

        notifyMutedChange(isSuspended);
    });
#endif
}
597
// The source is interrupted when either a session interruption has been
// recorded locally or the base class reports an interruption.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
605
// Builds the preset list from the device's native formats: one preset per
// distinct frame size (first format wins), carrying that format's supported
// frame-rate ranges.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormatType* format in [device() formats]) {

        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize size = { dimensions.width, dimensions.height };
        // Deduplicate by size: keep only the first format seen at each size.
        auto index = presets.findMatching([&size](auto& preset) {
            return size == preset->size;
        });
        if (index != notFound)
            continue;

        Vector<FrameRateRange> frameRates;
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges])
            frameRates.append({ range.minFrameRate, range.maxFrameRate});

        presets.append(AVVideoPreset::create(size, WTFMove(frameRates), format));
    }

    setSupportedPresets(WTFMove(presets));
}
628
#if PLATFORM(IOS_FAMILY)
// Handles AVCaptureSessionRuntimeErrorNotification. Only a media-services
// reset is treated as recoverable: attempt one restart and record whether it
// took effect (so a failed restart is not retried).
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}

// Handles AVCaptureSessionWasInterruptedNotification: records the reason,
// relying on the enum equivalence asserted in the constructor.
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}

// Handles AVCaptureSessionInterruptionEndedNotification: clears the recorded
// reason and, for the side-by-side multitasking case only, restarts the
// session if it is not already running.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
#endif
657
// Handles AVCaptureDeviceWasDisconnectedNotification: fail this source only
// when the disconnected device is the one we capture from.
void AVVideoCaptureSource::deviceDisconnected(RetainPtr<NSNotification> notification)
{
    if (device() != [notification object])
        return;

    captureFailed();
}
663
664
665 } // namespace WebCore
666
667 @implementation WebCoreAVVideoCaptureSourceObserver
668
// Designated initializer: remembers the C++ source the observer forwards to.
- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
679
// Detaches from the C++ source. Called by ~AVVideoCaptureSource() before
// destruction; after this, no callback dereferences m_callback.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}
686
// Registers for device-disconnection notifications and, on iOS, for the
// session's runtime-error and interruption notifications. Balanced by
// removeNotificationObservers.
- (void)addNotificationObservers
{
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];

    [center addObserver:self selector:@selector(deviceConnectedDidChange:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];

#if PLATFORM(IOS_FAMILY)
    // Session-scoped notifications: only observe our own session.
    AVCaptureSessionType* session = m_callback->session();
    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}
702
// Unregisters this observer from all notification-center notifications.
- (void)removeNotificationObservers
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
707
// AVCaptureVideoDataOutputSampleBufferDelegate: forwards each frame to the
// C++ source unless the observer has been disconnected. Runs on the capture queue.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (m_callback)
        m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}
715
// KVO dispatch for the two key paths this observer registers: the session's
// "running" state and the device's "suspended" state. Change notifications
// are forwarded only on the "did change" (non-prior) callback.
// NOTE(review): keyPath is declared without a type (implicitly id) and super
// is never called for unhandled paths; harmless while registrations are
// limited to these two key paths — confirm if registrations grow.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
#if !LOG_DISABLED

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if (!willChange && [keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
    if (!willChange && [keyPath isEqualToString:@"suspended"])
        m_callback->captureDeviceSuspendedDidChange();
}
742
// Handles AVCaptureDeviceWasDisconnectedNotification; the C++ side decides
// whether the disconnected device is the one this source captures from.
- (void)deviceConnectedDidChange:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::deviceConnectedDidChange(%p)", self);

    if (!m_callback)
        return;

    m_callback->deviceDisconnected(notification);
}
750
#if PLATFORM(IOS_FAMILY)
// Forwards session runtime errors (with the NSError from the notification's
// userInfo) to the C++ source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}

// Forwards the begin-interruption notification; the reason code is extracted
// on the C++ side.
- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}

// Forwards the end-interruption notification so the source can clear its
// interruption state and possibly restart.
- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
#endif
777
778 @end
779
780 #endif // ENABLE(MEDIA_STREAM)