83afed762fc612e4312a1156c295dd38a8cc4080
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenterMac.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
// Typedef aliases for the soft-linked AVFoundation classes so they can be used
// as types without a hard link-time dependency on the framework.
typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceTypedef;
typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
typedef AVFrameRateRange AVFrameRateRangeType;
typedef AVCaptureSession AVCaptureSessionType;

// AVFoundation is loaded lazily at runtime; every class/constant below is
// resolved through the generated getXXX() accessors.
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)

// Redirect bare class names to the soft-link accessors for the rest of the file.
#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
#define AVFrameRateRange getAVFrameRateRangeClass()

SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)

#if PLATFORM(IOS_FAMILY)
// Session interruption/error notifications are only observed on iOS-family
// platforms (see -addNotificationObservers).
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)

#define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
#define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
#define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
#define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
#define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
#endif
94
95 using namespace WebCore;
96 using namespace PAL;
97
// Objective-C adapter that forwards AVCaptureSession delegate callbacks, KVO
// changes, and (on iOS) session notifications to the C++ AVVideoCaptureSource.
// m_callback is a raw pointer; -disconnect must be called before the source dies.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
#endif
@end
114
115 namespace WebCore {
116
// Pixel format requested for the capture output's sample buffers.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if PLATFORM(MAC)
    // Planar 4:2:0 is used on macOS as a workaround for the radar above.
    return kCVPixelFormatType_420YpCbCr8Planar;
#else
    return preferedPixelBufferFormat();
#endif
}
126
// Returns the process-wide serial queue on which the video data output
// delivers sample buffers. (The misspelled name is kept: callers use it.)
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t captureQueue;
    dispatch_once(&createQueueOnce, ^{
        captureQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return captureQueue;
}
136
// VideoPreset specialization that remembers the AVCaptureDeviceFormat it was
// generated from, so setSizeAndFrameRateWithPreset() can re-apply the format.
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // Retained so the format stays valid independent of the device's format list.
    RetainPtr<AVCaptureDeviceFormatType> format;
};
152
// Factory: looks up the capture device by unique ID and wraps it in a source.
// Returns an empty CaptureSourceOrError when the device does not exist, or the
// constraint-failure result when the given constraints cannot be satisfied.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *device = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!device)
        return { };

    auto source = adoptRef(*new AVVideoCaptureSource(device, WTFMove(id), WTFMove(hashSalt)));
    if (constraints) {
        auto result = source->applyConstraints(*constraints);
        // A non-empty result means a constraint failed; propagate it as the error.
        if (result)
            return WTFMove(result.value().first);
    }

    return CaptureSourceOrError(WTFMove(source));
}
168
// Only records state; the AVCaptureSession itself is created lazily in
// setupSession() when data production starts.
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, String&& id, String&& hashSalt)
    : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // captureSessionBeginInterruption() casts AVCaptureSessionInterruptionReason
    // values directly to InterruptionReason, so the enumerators must stay
    // numerically identical.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif
}
181
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoFactory().unsetActiveSource(*this);
#endif
    // Detach the Objective-C observer first so no callback can reach a
    // half-destroyed object.
    [m_objcObserver disconnect];

    if (!m_session)
        return;

    // Must stay paired with the addObserver:forKeyPath:@"rate" in setupSession().
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
    if ([m_session isRunning])
        [m_session stopRunning];
}
196
// Starts the capture session, lazily building it on first use. A session that
// is already running is left untouched.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
210
// Stops the capture session and clears any pending interruption state.
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    [m_objcObserver removeNotificationObservers];
    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    // Drop the session entirely on iOS; it is rebuilt on the next start, which
    // releases the camera for other clients.
    m_session = nullptr;
#endif
}
226
// Opens a configuration transaction on the session, if one exists.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;
    [m_session beginConfiguration];
}
232
// Commits a configuration transaction on the session, if one exists.
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;
    [m_session commitConfiguration];
}
238
// Invalidates the cached settings; settings() rebuilds them on demand.
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = std::nullopt;
}
243
// Returns the current settings, computing and caching them on first request.
// The cache is cleared by settingsDidChange().
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings newSettings;

    switch ([device() position]) {
    case AVCaptureDevicePositionFront:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::User);
        break;
    case AVCaptureDevicePositionBack:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Environment);
        break;
    default:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
        break;
    }

    newSettings.setFrameRate(frameRate());

    auto& currentSize = this->size();
    newSettings.setWidth(currentSize.width());
    newSettings.setHeight(currentSize.height());
    newSettings.setDeviceId(hashedId());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only meaningful when the device reports a position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);
    newSettings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(newSettings);

    return *m_currentSettings;
}
277
// Returns the device capabilities, computed lazily and cached.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities newCapabilities(settings().supportedConstraints());
    newCapabilities.setDeviceId(hashedId());

    auto position = [device() position];
    if (position == AVCaptureDevicePositionFront)
        newCapabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if (position == AVCaptureDevicePositionBack)
        newCapabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    updateCapabilities(newCapabilities);

    m_capabilities = WTFMove(newCapabilities);

    return *m_capabilities;
}
298
// On the iOS family, prefer binned formats; elsewhere every preset is acceptable.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    // Reached only on non-iOS-family builds; the iOS branch returns above.
    return true;
}
309
// Applies the preset's device format and the min/max frame durations matching
// the requested rate. If no session exists yet, the request is stashed in the
// m_pending* members and re-applied from setupCaptureSession().
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    if (!m_session) {
        // No session yet: remember the request for setupCaptureSession().
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    m_requestedSize = requestedSize;

    NSError *error = nil;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            // Only switch the active format when it actually differs.
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];
#if PLATFORM(MAC)
                // On macOS the output buffer geometry must be pinned explicitly.
                auto settingsDictionary = @{
                    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                    (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                    (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height())
                };
                [m_videoOutput setVideoSettings:settingsDictionary];
#endif
            }
            [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
            [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        // Device configuration can throw (e.g. unsupported format); log and continue.
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - exception thrown configuring device: <%s> %s", [[exception name] UTF8String], [[exception reason] UTF8String]);
    }
    [m_session commitConfiguration];

    m_currentPreset = avPreset;

    if (error)
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - failed to lock video device for configuration: %s", [[error localizedDescription] UTF8String]);
}
365
// Maps an AVCaptureVideoOrientation to the sensor's rotation in degrees.
// The mapping differs between the iOS family and other platforms (the portrait
// cases are swapped); landscape values agree.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // The switches cover every enumerator, but falling off the end of a
    // non-void function is undefined behavior if the framework ever hands us
    // an out-of-range value; fail safe with 0 (no rotation).
    ASSERT_NOT_REACHED();
    return 0;
}
392
// Reads the sensor orientation from the output's video connection; 0 when the
// output has no video connection.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* videoConnection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    if (!videoConnection)
        return 0;
    return sensorOrientation([videoConnection videoOrientation]);
}
398
// Creates the AVCaptureSession on first use and configures the capture graph.
// Returns false (after signaling captureFailed()) when setup does not succeed.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    // NOTE(review): this registers KVO for the key path "rate", but the handler
    // in WebCoreAVVideoCaptureSourceObserver only reacts to "running" — confirm
    // the intended key path. The destructor's removeObserver: must stay paired
    // with whatever is registered here.
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
416
// Finds the active format's frame-rate range containing `rate`. When several
// ranges qualify, the one with the largest minimum frame duration wins.
// Returns nil (after logging) when no range contains the rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *candidate = nil;
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (!frameRateRangeIncludesRate({ [range minFrameRate], [range maxFrameRate] }, rate))
            continue;
        if (!candidate || CMTIME_COMPARE_INLINE([range minFrameDuration], >, [candidate minFrameDuration]))
            candidate = range;
    }

    if (!candidate)
        RELEASE_LOG(Media, "AVVideoCaptureSource::frameDurationForFrameRate, no frame rate range for rate %g", rate);

    return candidate;
}
432
// Builds both halves of the capture graph: the device input and the video data
// output whose buffers are delivered to the observer on the capture queue.
// The caller (setupSession) wraps this in begin/commitConfiguration.
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    // NOTE(review): failure is detected via `error` rather than the nil return;
    // Cocoa convention is to trust the return value — confirm this is deliberate.
    if (error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Dropping late frames keeps latency bounded for real-time consumption.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Re-apply any size/frame-rate request made before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
473
// Releases the last retained sample and resets the cached frame geometry.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_width = 0;
    m_height = 0;
}
480
// Registers for device-orientation updates (iOS family only) and seeds the
// current orientation immediately.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
490
// OrientationNotifier callback; orientation is in degrees and limited to the
// four cardinal values.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
497
// Derives the rotation to apply to outgoing samples from the difference
// between sensor and device orientation. Both inputs are multiples of 90 in
// [-90, 180], so the difference lies in [-270, 270]; ±270 is the same rotation
// as ∓90 and is folded into those cases. Front-facing cameras mirror, which
// swaps left and right.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
    case -270: // -270 degrees is the same rotation as +90.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case 270: // +270 degrees is the same rotation as -90.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
521
// Accepts a captured sample: updates the cached frame size (swapping width and
// height for sideways rotations) and forwards the sample to observers.
// Dropped entirely while the source is stopped or muted.
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    if (!isProducingData() || muted())
        return;

    m_buffer = &sample.get();

    auto newSize = roundedIntSize(sample->presentationSize());
    bool rotatedSideways = m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right;
    if (rotatedSideways)
        newSize = { newSize.height(), newSize.width() };

    if (newSize.width() != m_width || newSize.height() != m_height) {
        m_width = newSize.width();
        m_height = newSize.height();
        setSize(newSize);
    }

    dispatchMediaSampleToObservers(WTFMove(sample));
}
540
// Entry point from the capture queue: wraps the CMSampleBuffer with the
// current rotation and the connection's mirroring flag, then defers the actual
// processing via scheduleDeferredTask().
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
548
// Records the session's running state and mirrors it into the muted state
// (not running == muted). Work is deferred via scheduleDeferredTask().
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        bool runningUnchanged = state == m_isRunning;
        bool mutedConsistent = state != muted();
        if (runningUnchanged && mutedConsistent)
            return;

        m_isRunning = state;
        notifyMutedChange(!m_isRunning);
    });
}
559
// Interrupted when either a session interruption is pending or the base class
// reports an interruption.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
567
// Builds the preset list from the device's formats, keeping only the first
// format seen for each distinct pixel size.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormatType* format in [device() formats]) {
        auto dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize presetSize = { dimensions.width, dimensions.height };

        // Skip sizes already represented by an earlier format.
        auto existingIndex = presets.findMatching([&presetSize](auto& preset) {
            return presetSize == preset->size;
        });
        if (existingIndex != notFound)
            continue;

        Vector<FrameRateRange> frameRates;
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges])
            frameRates.append({ range.minFrameRate, range.maxFrameRate });

        presets.append(AVVideoPreset::create(presetSize, WTFMove(frameRates), format));
    }

    setSupportedPresets(WTFMove(presets));
}
590
591 #if PLATFORM(IOS_FAMILY)
// Runtime-error handler (iOS only): attempts a single restart, but only when
// media services were reset while we were running.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
601
// Records why the session was interrupted. The direct cast relies on the
// static_asserts in the constructor keeping InterruptionReason numerically
// identical to AVCaptureSessionInterruptionReason.
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
606
// Clears the interruption state. Only a side-by-side (multitasking)
// interruption triggers an automatic restart of a non-running session.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
618 #endif
619
620 } // namespace WebCore
621
622 @implementation WebCoreAVVideoCaptureSourceObserver
623
// Stores a raw back-pointer to the owning C++ source; the source clears it via
// -disconnect before it is destroyed.
- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}
634
// Severs the link to the C++ source (called from its destructor). After this,
// delegate callbacks and notifications are ignored.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}
641
// Subscribes to the session's runtime-error and interruption notifications
// (iOS family only); a no-op elsewhere. Balanced by -removeNotificationObservers.
- (void)addNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    AVCaptureSessionType* session = m_callback->session();

    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}
655
// Unsubscribes from every notification this observer registered for.
- (void)removeNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#endif
}
662
// Sample-buffer delegate callback (runs on the capture queue); forwards to the
// C++ source while it is still attached.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (m_callback)
        m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}
670
// KVO handler; forwards "running" changes to the C++ source.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

#if !LOG_DISABLED
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    // NOTE(review): setupSession() registers this observer for the key path
    // "rate", so this "running" branch may never fire — verify the intended
    // key path against setupSession().
    if ([keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
}
695
696 #if PLATFORM(IOS_FAMILY)
// AVCaptureSessionRuntimeErrorNotification handler: extracts the NSError and
// forwards it to the C++ source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}
705
// AVCaptureSessionWasInterruptedNotification handler; the reason key is parsed
// by the C++ source.
- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}
713
// AVCaptureSessionInterruptionEndedNotification handler.
- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
721 #endif
722
723 @end
724
725 #endif // ENABLE(MEDIA_STREAM)