[MediaStream] AVVideoCaptureSource reports incorrect size when frames are scaled
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenterMac.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
// Alias the soft-linked AVFoundation classes under distinct names so they can
// be used as plain types without colliding with the class-getter #defines below.
typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceTypedef;
typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
typedef AVFrameRateRange AVFrameRateRangeType;
typedef AVCaptureSession AVCaptureSessionType;

// AVFoundation is soft-linked so WebCore does not pay its load cost (and does
// not hard-depend on it) unless capture is actually used.
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)

// Make unqualified class names below resolve to the soft-link getters.
#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
#define AVFrameRateRange getAVFrameRateRangeClass()

SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)

// iOS-only notification names and userInfo keys used by the observer class.
#if PLATFORM(IOS_FAMILY)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)

#define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
#define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
#define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
#define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
#define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
#endif

using namespace WebCore;
using namespace PAL;
97
// Objective-C adapter that forwards AVFoundation callbacks — sample-buffer
// delivery, KVO changes and (on iOS) session notifications — to the owning
// C++ AVVideoCaptureSource. The back-pointer is raw; it is cleared via
// -disconnect before the C++ object is destroyed.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
#endif
@end
114
115 namespace WebCore {
116
// Pixel format requested from the capture pipeline. macOS pins a planar 4:2:0
// format as a workaround; other platforms use the shared preferred format.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if PLATFORM(MAC)
    return kCVPixelFormatType_420YpCbCr8Planar;
#else
    return preferedPixelBufferFormat();
#endif
}
126
// Returns the process-wide serial queue on which video sample buffers are
// delivered; created once and targeted at the high-priority global queue.
// NOTE(review): the "globa" spelling in the name is a typo, kept because the
// function is referenced by name elsewhere in this file.
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t captureQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        captureQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return captureQueue;
}
136
// VideoPreset specialization that keeps hold of the AVCaptureDeviceFormat it
// was generated from, so the exact format can later be re-applied to the
// device with -setActiveFormat: (see setSizeAndFrameRateWithPreset()).
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // Backing device format for this preset (retained).
    RetainPtr<AVCaptureDeviceFormatType> format;
};
152
// Factory: looks up the AVCaptureDevice with the given unique ID and wraps it
// in an AVVideoCaptureSource. Returns an empty result when the device does not
// exist, or the constraint-failure result when `constraints` cannot be applied.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *captureDevice = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!captureDevice)
        return { };

    auto captureSource = adoptRef(*new AVVideoCaptureSource(captureDevice, WTFMove(id), WTFMove(hashSalt)));
    if (!constraints)
        return CaptureSourceOrError(WTFMove(captureSource));

    if (auto failure = captureSource->applyConstraints(*constraints))
        return WTFMove(failure.value().first);

    return CaptureSourceOrError(WTFMove(captureSource));
}
168
// Constructor: the ObjC observer is created eagerly, but the AVCaptureSession
// itself is created lazily in setupSession() on first start.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, String&& id, String&& hashSalt)
    : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // Compile-time guarantee that InterruptionReason values mirror the raw
    // AVCaptureSessionInterruptionReason values, so the notification payload
    // can be static_cast directly (see captureSessionBeginInterruption()).
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif
}
181
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoFactory().unsetActiveSource(*this);
#endif
    [m_objcObserver disconnect];

    if (!m_session)
        return;

    // Balance the KVO registration made in setupSession(). AVCaptureSession
    // exposes a KVO-observable "running" property; "rate" is not a property of
    // AVCaptureSession, and the observer callback only acts on "running", so
    // observing "rate" would never deliver a change.
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    if ([m_session isRunning])
        [m_session stopRunning];
}

// Creates the session on first use, then starts it. Notification observers are
// (re)attached each time capture starts so interruptions are seen while running.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session) {
        if (!setupSession())
            return;
    }

    if ([m_session isRunning])
        return;

    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}

void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    // iOS discards the session on stop; deregister the "running" observer first
    // so the session is not deallocated with a KVO observer still attached.
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    m_session = nullptr;
#endif
}

// begin/commitConfiguration bracket a batch of session mutations so that
// AVFoundation applies them atomically.
void AVVideoCaptureSource::beginConfiguration()
{
    if (m_session)
        [m_session beginConfiguration];
}

void AVVideoCaptureSource::commitConfiguration()
{
    if (m_session)
        [m_session commitConfiguration];
}

// Any settings change invalidates the cached settings object; it is rebuilt on
// the next call to settings().
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = std::nullopt;
}

// Builds (and caches) the current MediaStreamTrack settings from the device
// position and the source's current size/frame rate.
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings settings;
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(frameRate());
    auto& size = this->size();
    settings.setWidth(size.width());
    settings.setHeight(size.height());
    settings.setDeviceId(hashedId());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only constrainable when the device reports a position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    settings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(settings);

    return *m_currentSettings;
}

// Builds (and caches) the source capabilities; size/frame-rate ranges are
// filled in by the base class via updateCapabilities().
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(hashedId());

    AVCaptureDeviceTypedef *videoDevice = device();
    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    updateCapabilities(capabilities);

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}

// On iOS, prefer binned formats; elsewhere every preset is equally acceptable.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}

// Applies a preset (device format) and frame rate to the device. Before the
// session exists the request is only recorded; setupCaptureSession() replays
// it once the session is configured.
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    if (!m_session) {
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    NSError *error = nil;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            // Only switch formats when the preset's format actually differs.
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];
#if PLATFORM(MAC)
                // On macOS the video output must be told explicitly to scale
                // frames to the preset's dimensions.
                auto settingsDictionary = @{
                    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                    (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                    (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height())
                };
                [m_videoOutput setVideoSettings:settingsDictionary];
#endif
            }
            [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
            [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - exception thrown configuring device: <%s> %s", [[exception name] UTF8String], [[exception reason] UTF8String]);
    }
    [m_session commitConfiguration];

    m_currentPreset = avPreset;

    if (error)
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - failed to lock video device for configuration: %s", [[error localizedDescription] UTF8String]);
}

// Maps an AVCaptureVideoOrientation to the sensor rotation in degrees. The
// mapping differs between platforms (iOS camera sensors are mounted landscape).
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
}

// Reads the sensor orientation from the output's video connection; 0 when no
// video connection exists.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* connection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    return connection ? sensorOrientation([connection videoOrientation]) : 0;
}

// Lazily creates and configures the AVCaptureSession; returns false (after
// signalling captureFailed()) when configuration fails.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    // Observe the session's "running" property; the observer forwards changes
    // to captureSessionIsRunningDidChange(). The previous key path ("rate")
    // does not exist on AVCaptureSession and never fired.
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
414
// Finds an AVFrameRateRange of the active format that contains `rate`. When
// several ranges qualify, the one with the largest minimum frame duration
// (i.e. the most constrained range) is preferred. Returns nil, after logging,
// when no range contains the rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *bestRange = nil;
    for (AVFrameRateRangeType *candidate in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (!frameRateRangeIncludesRate({ [candidate minFrameRate], [candidate maxFrameRate] }, rate))
            continue;
        if (!bestRange || CMTIME_COMPARE_INLINE([candidate minFrameDuration], >, [bestRange minFrameDuration]))
            bestRange = candidate;
    }

    if (!bestRange)
        RELEASE_LOG(Media, "AVVideoCaptureSource::frameDurationForFrameRate, no frame rate range for rate %g", rate);

    return bestRange;
}
430
// Wires the device input and the video-data output into the session. Called
// from setupSession() between begin/commitConfiguration. Returns false on any
// failure; the caller reports captureFailed().
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    // Check the returned object, not just the out-error: -initWithDevice:error:
    // signals failure by returning nil, and the error object is only
    // conditionally populated.
    if (!videoIn || error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    // Request the capture pixel format; width/height are added later on macOS
    // when a preset is applied (see setSizeAndFrameRateWithPreset()).
    auto settingsDictionary = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat())
    };
    [m_videoOutput setVideoSettings:settingsDictionary];
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Replay any size/frame-rate request recorded before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
471
// Releases the reference to the most recently delivered media sample.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
}
476
// iOS only: registers for device-orientation changes and seeds the current
// orientation so sample rotation is correct from the first frame. No-op on
// other platforms.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
486
// OrientationNotifier callback; `orientation` is in degrees and limited to the
// four cardinal values. Recomputes the per-sample rotation.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
493
// Derives the rotation to apply to delivered samples from the difference
// between the sensor orientation and the device orientation. Both inputs are
// in { 0, 90, -90, 180 }, so the difference is a multiple of 90 in
// [-270, 270]; angles are equivalent modulo 360 (-270 == 90, 270 == -90).
// The previous grouping put -270 with -90 (wrong direction) and omitted 270
// entirely, which hit ASSERT_NOT_REACHED.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
    case -270:
        // Front cameras are mirrored, so the rotation direction flips.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case 270:
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
517
// Invoked via scheduleDeferredTask after a frame arrives; drops the frame if
// the source stopped or was muted in the meantime, otherwise keeps a reference
// to it and forwards it to observers.
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    if (!isProducingData() || muted())
        return;

    m_buffer = &sample.get();
    dispatchMediaSampleToObservers(WTFMove(sample));
}
526
// Called on the capture queue (the video output's delegate queue) for every
// captured frame; wraps the CMSampleBuffer with the current rotation/mirroring
// and defers delivery via scheduleDeferredTask.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
534
// KVO-driven: the session's running state changed. A non-running session is
// surfaced to clients as "muted".
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        // Nothing to do when neither the cached running flag nor the derived
        // muted state would change.
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        notifyMutedChange(!m_isRunning);
    });
}
545
// A pending AVFoundation interruption (recorded on iOS) counts as interrupted,
// in addition to whatever the base class reports.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
553
// Builds the supported-presets list from the device's formats, keeping only
// the first format seen for each distinct pixel dimension.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormatType* format in [device() formats]) {

        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize presetSize = { dimensions.width, dimensions.height };

        // Skip formats whose dimensions are already represented.
        bool haveSize = presets.findMatching([&presetSize](auto& preset) {
            return presetSize == preset->size;
        }) != notFound;
        if (haveSize)
            continue;

        Vector<FrameRateRange> rateRanges;
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges])
            rateRanges.append({ range.minFrameRate, range.maxFrameRate });

        presets.append(AVVideoPreset::create(presetSize, WTFMove(rateRanges), format));
    }

    setSupportedPresets(WTFMove(presets));
}
576
#if PLATFORM(IOS_FAMILY)
// Session runtime-error handler: if media services were reset while we were
// running, attempt a single restart.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}

// Records why the session was interrupted. The raw notification value maps
// directly onto InterruptionReason (see the static_asserts in the constructor).
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}

// Interruption ended: only the side-by-side multitasking case warrants an
// automatic restart attempt; other reasons wait for an explicit restart.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
#endif
605
606 } // namespace WebCore
607
@implementation WebCoreAVVideoCaptureSourceObserver

- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;

    return self;
}

// Severs the link to the C++ source; called from ~AVVideoCaptureSource so the
// raw m_callback pointer never dangles.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}

// iOS only: subscribe to the session's runtime-error and interruption
// notifications. No-op elsewhere.
- (void)addNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    AVCaptureSessionType* session = m_callback->session();

    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}

- (void)removeNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#endif
}

// AVCaptureVideoDataOutputSampleBufferDelegate: invoked on the capture queue
// for every frame; forwarded to the C++ source when still connected.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (!m_callback)
        return;

    m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}

// KVO callback; only the session's "running" key path is acted upon.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

#if !LOG_DISABLED
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if ([keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
}

#if PLATFORM(IOS_FAMILY)
// Forwards session runtime errors to the C++ source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}

// Forwards interruption-began notifications (reason in userInfo).
- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}

// Forwards interruption-ended notifications.
- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
#endif

@end
710
711 #endif // ENABLE(MEDIA_STREAM)