4cb2f9397eacc15f76dbdf4b6378089285035ed0
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "IntRect.h"
33 #import "Logging.h"
34 #import "MediaConstraints.h"
35 #import "MediaSampleAVFObjC.h"
36 #import "PixelBufferResizer.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenterMac.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
51 typedef AVCaptureConnection AVCaptureConnectionType;
52 typedef AVCaptureDevice AVCaptureDeviceTypedef;
53 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
54 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
55 typedef AVCaptureOutput AVCaptureOutputType;
56 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
57 typedef AVFrameRateRange AVFrameRateRangeType;
58 typedef AVCaptureSession AVCaptureSessionType;
59
60 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
61
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
66 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
67 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
68 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
69 SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
70
71 #define AVCaptureConnection getAVCaptureConnectionClass()
72 #define AVCaptureDevice getAVCaptureDeviceClass()
73 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
74 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
75 #define AVCaptureOutput getAVCaptureOutputClass()
76 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
77 #define AVFrameRateRange getAVFrameRateRangeClass()
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
80
81 #if PLATFORM(IOS)
82 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
83 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
84 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
85 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
86 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)
87
88 #define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
89 #define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
90 #define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
91 #define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
92 #define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
93 #endif
94
95 using namespace WebCore;
96 using namespace PAL;
97
// Sample-buffer delegate plus KVO/notification observer that forwards AVFoundation
// callbacks to the owning AVVideoCaptureSource. Holds a raw back-pointer to the
// source, so the source must call -disconnect before it is destroyed.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
#endif
@end
114
115 namespace WebCore {
116
// Pixel format requested for capture output: a hard-coded planar 4:2:0 format on
// macOS, the platform's preferred format elsewhere.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if PLATFORM(MAC)
    return kCVPixelFormatType_420YpCbCr8Planar;
#else
    return preferedPixelBufferFormat();
#endif
}
126
// Lazily creates the process-wide serial queue on which video sample buffers are
// delivered. Targets the high-priority global queue so frame delivery is timely.
// (Function name — including the existing "globa" misspelling — is preserved
// because call sites in this file reference it.)
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_once_t createQueueOnce;
    static dispatch_queue_t captureQueue;
    dispatch_once(&createQueueOnce, ^{
        captureQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return captureQueue;
}
136
// VideoPreset subclass that pairs a size/frame-rate combination with the
// AVCaptureDeviceFormat it was derived from, so the format can be re-applied
// to the device when the preset is selected.
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // The underlying AVFoundation format; retained so it outlives the device's format list.
    RetainPtr<AVCaptureDeviceFormatType> format;
};
152
// Creates a capture source for the device with the given unique ID, applying any
// constraints up front. Returns an empty result when the device does not exist,
// or the failed-constraint name when a constraint cannot be satisfied.
CaptureSourceOrError AVVideoCaptureSource::create(const AtomicString& id, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *captureDevice = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!captureDevice)
        return { };

    auto captureSource = adoptRef(*new AVVideoCaptureSource(captureDevice, id));
    if (constraints) {
        if (auto badConstraint = captureSource->applyConstraints(*constraints))
            return WTFMove(badConstraint.value().first);
    }

    return CaptureSourceOrError(WTFMove(captureSource));
}
168
// Constructs a source wrapping the given capture device. The session itself is
// created lazily in startProducingData()/setupSession().
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : RealtimeVideoSource(id, device.localizedName)
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS)
    // InterruptionReason values are stored by casting straight from AVFoundation's
    // interruption-reason constants; these asserts keep the two enums in lock step.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif

    setPersistentID(String(device.uniqueID));
}
183
// Tears down the observer and session. The observer is disconnected first so no
// callback can reach this object while the session is being stopped.
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS)
    RealtimeMediaSourceCenter::singleton().videoFactory().unsetActiveSource(*this);
#endif
    [m_objcObserver disconnect];

    if (!m_session)
        return;

    // KVO observer was registered in setupSession(); it must be removed before the session dies.
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
    if ([m_session isRunning])
        [m_session stopRunning];
}
198
// Starts the capture session, creating it on first use. A no-op when the session
// is already running or cannot be set up.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    // Register for interruption/error notifications before frames start flowing.
    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
212
// Stops the capture session and clears any recorded interruption state.
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS)
    // On iOS the session is released entirely so the camera hardware is freed immediately.
    m_session = nullptr;
#endif
}
228
// Opens an AVCaptureSession configuration transaction, if a session exists.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;
    [m_session beginConfiguration];
}
234
// Commits a previously opened configuration transaction, if a session exists.
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;
    [m_session commitConfiguration];
}
240
// Invalidates the cached settings so the next settings() call rebuilds them.
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = std::nullopt;
}
245
// Returns the current settings, rebuilding the cache from the device state when
// it was invalidated by settingsDidChange().
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings settings;
    auto position = [device() position];
    switch (position) {
    case AVCaptureDevicePositionFront:
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
        break;
    case AVCaptureDevicePositionBack:
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
        break;
    default:
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
        break;
    }

    settings.setFrameRate(frameRate());

    auto& frameSize = this->size();
    settings.setWidth(frameSize.width());
    settings.setHeight(frameSize.height());
    settings.setDeviceId(id());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only meaningful when the device reports a concrete position.
    supportedConstraints.setSupportsFacingMode(position != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    settings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(settings);

    return *m_currentSettings;
}
279
// Returns the device capabilities, computed once and cached.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(id());

    auto position = [device() position];
    if (position == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    else if (position == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    updateCapabilities(capabilities);

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}
300
// Indicates whether a preset is preferred. On iOS, binned video formats are
// deprioritized; everywhere else all presets are equally acceptable.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS)
    return ![static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}
311
// Applies a size/frame-rate selection to the capture device. If the session does
// not exist yet, the request is stashed and replayed later from setupCaptureSession().
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    if (!m_session) {
        // Defer: setupCaptureSession() re-invokes this method with the pending values.
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    m_requestedSize = requestedSize;

    NSError *error = nil;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            // Only switch formats when the preset's format differs from the one already active.
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];
#if PLATFORM(MAC)
                auto settingsDictionary = @{
                    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                    (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                    (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height())
                };
                [m_videoOutput setVideoSettings:settingsDictionary];
#endif
            }
            [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
            [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        // Device configuration can throw (e.g. unsupported format); log and keep the session usable.
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - exception thrown configuring device: <%s> %s", [[exception name] UTF8String], [[exception reason] UTF8String]);
    }
    [m_session commitConfiguration];

    m_currentPreset = avPreset;

    if (error)
        RELEASE_LOG(Media, "AVVideoCaptureSource::setFrameRate - failed to lock video device for configuration: %s", [[error localizedDescription] UTF8String]);
}
367
// Maps an AVCaptureVideoOrientation to the sensor's rotation in degrees.
// The mapping differs between iOS and macOS because the sensor mounting differs.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // Previously control could fall off the end of this non-void function
    // (undefined behavior) if AVFoundation ever adds an orientation value.
    // Treat unknown orientations as unrotated.
    ASSERT_NOT_REACHED();
    return 0;
}
394
// Returns the sensor orientation for the output's video connection, or 0 when no
// video connection exists.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* videoConnection = [videoOutput connectionWithMediaType:getAVMediaTypeVideo()];
    if (!videoConnection)
        return 0;
    return sensorOrientation([videoConnection videoOrientation]);
}
400
// Creates and configures the capture session on first use. Returns true on
// success; on failure the source is marked as failed.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    // NOTE(review): this observes keyPath "rate", but the KVO handler in
    // WebCoreAVVideoCaptureSourceObserver only reacts to "running" — confirm the
    // intended keyPath; as written the running-state change callback may never fire.
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
418
// Finds the frame-rate range of the device's active format that contains `rate`.
// Among matching ranges, prefers the one with the largest minimum frame duration
// (i.e. the narrowest/lowest-rate range that still satisfies the request).
// Returns nil when no range supports the requested rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (frameRateRangeIncludesRate({ [frameRateRange minFrameRate], [frameRateRange maxFrameRate] }, rate)) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange)
        RELEASE_LOG(Media, "AVVideoCaptureSource::frameDurationForFrameRate, no frame rate range for rate %g", rate);

    return bestFrameRateRange;
}
434
// Wires the device input and the video-data output into the session. Called from
// setupSession() inside a begin/commitConfiguration pair. Returns false when the
// input or output cannot be added.
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS)
    RealtimeMediaSourceCenter::singleton().videoFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Dropping late frames keeps latency bounded for real-time use.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Replay any size/frame-rate request that arrived before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
475
// Releases the last retained sample buffer and forgets the last observed frame size.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_width = 0;
    m_height = 0;
}
482
// Subscribes to device-orientation changes (iOS only) and applies the current
// orientation immediately. No-op on other platforms.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
492
// OrientationNotifier callback: records the new device orientation (expected to
// be one of 0/90/-90/180 degrees) and recomputes the sample rotation.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
499
// Derives the rotation to apply to delivered samples from the difference between
// sensor and device orientation. Both inputs are in {-90, 0, 90, 180}, so the
// difference lies in {-270, -180, -90, 0, 90, 180, 270}; values are equivalent
// modulo 360 (e.g. -270 == 90, 270 == -90). Front-camera samples mirror the
// 90-degree rotations.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
    case -270: // -270 == 90 (mod 360); previously mis-grouped with -90.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case 270: // 270 == -90 (mod 360); previously unhandled and hit the assert below.
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
523
// Main-side handler for a captured frame (scheduled from the capture queue).
// Retains the buffer, updates the reported size (swapping width/height when the
// sample is rotated a quarter turn), and forwards the sample to observers.
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer, RetainPtr<AVCaptureConnectionType> connection)
{
    if (!isProducingData() || muted())
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    m_buffer = sampleBuffer;
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    // A 90-degree rotation swaps the displayed width and height.
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        std::swap(dimensions.width, dimensions.height);

    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;
        setSize({ dimensions.width, dimensions.height });
    }

    videoSampleAvailable(MediaSampleAVFObjC::create(m_buffer.get(), m_sampleRotation, [connection isVideoMirrored]));
}
546
// Capture-queue delegate entry point. Resizes the sample to the requested size
// when the device could not deliver it natively, then hands the frame to
// processNewFrame() on the main thread.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (!formatDescription)
        return;

    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    // Only resize when a size has actually been requested; previously an empty
    // (0x0) m_requestedSize caused a resizer targeting zero dimensions to be
    // created for every frame.
    if (!m_requestedSize.isEmpty() && (dimensions.width != m_requestedSize.width() || dimensions.height != m_requestedSize.height())) {
        if (m_pixelBufferResizer && !m_pixelBufferResizer->canResizeTo(m_requestedSize))
            m_pixelBufferResizer = nullptr;

        if (!m_pixelBufferResizer)
            m_pixelBufferResizer = std::make_unique<PixelBufferResizer>(m_requestedSize, avVideoCapturePixelBufferFormat());
    } else
        m_pixelBufferResizer = nullptr;

    auto buffer = retainPtr(sampleBuffer);
    if (m_pixelBufferResizer) {
        buffer = m_pixelBufferResizer->resize(sampleBuffer);
        if (!buffer)
            return;
    }

    scheduleDeferredTask([this, buffer, connection = retainPtr(captureConnection)] {
        this->processNewFrame(buffer, connection);
    });
}
574
// Reflects the session's running state into the source's muted state. The early
// return skips the notification only when both the running flag and the derived
// muted state are already consistent with `state`.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        notifyMutedChange(!m_isRunning);
    });
}
585
// The source is interrupted when AVFoundation has reported an interruption or
// the base class considers itself interrupted.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
593
// Builds the supported-preset list from the device's formats, keeping only the
// first format seen for each distinct frame size.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormatType* format in [device() formats]) {
        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize presetSize = { dimensions.width, dimensions.height };

        // Skip formats whose dimensions we have already recorded.
        bool alreadyHaveSize = presets.findMatching([&presetSize](auto& preset) {
            return presetSize == preset->size;
        }) != notFound;
        if (alreadyHaveSize)
            continue;

        Vector<FrameRateRange> frameRateRanges;
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges])
            frameRateRanges.append({ range.minFrameRate, range.maxFrameRate });

        presets.append(AVVideoPreset::create(presetSize, WTFMove(frameRateRanges), format));
    }

    setSupportedPresets(WTFMove(presets));
}
616
617 #if PLATFORM(IOS)
// iOS-only: when media services were reset while we were running, attempt a single
// session restart; any other runtime error is ignored here.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
627
// iOS-only: records why the session was interrupted. The cast relies on the
// static_asserts in the constructor keeping InterruptionReason aligned with
// AVFoundation's interruption-reason constants.
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
632
// iOS-only: clears the interruption state. Only the side-by-side multitasking
// interruption warrants an automatic restart; other reasons end on their own
// or are restarted elsewhere.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
644 #endif
645
646 } // namespace WebCore
647
648 @implementation WebCoreAVVideoCaptureSourceObserver
649
// Designated initializer: stores the (unowned) source pointer the observer
// forwards callbacks to.
- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
660
// Severs the link to the source. Must be called before the source is destroyed,
// since m_callback is a raw pointer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}
667
// iOS-only: subscribes to the session's runtime-error and interruption
// notifications. No-op on other platforms.
- (void)addNotificationObservers
{
#if PLATFORM(IOS)
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    AVCaptureSessionType* session = m_callback->session();

    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}
681
// iOS-only: unsubscribes from all notifications registered by addNotificationObservers.
- (void)removeNotificationObservers
{
#if PLATFORM(IOS)
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#endif
}
688
// Sample-buffer delegate callback (runs on the capture queue): forward the frame
// to the source unless we have been disconnected.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (m_callback)
        m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}
696
// KVO callback. Forwards session running-state changes to the source.
// NOTE(review): setupSession() registers this observer for keyPath "rate", but
// only "running" is handled here — confirm which keyPath is intended; as written
// the running-state forwarding below may never trigger.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

#if !LOG_DISABLED
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if ([keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
}
721
722 #if PLATFORM(IOS)
// iOS-only notification handler: forwards the session's runtime error to the source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}
731
// iOS-only notification handler: forwards the start of a session interruption.
- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}
739
// iOS-only notification handler: forwards the end of a session interruption.
- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
747 #endif
748
749 @end
750
751 #endif // ENABLE(MEDIA_STREAM)