8b40a29e8b977868530b2a5b6a4076256ec17286
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenter.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import "CoreVideoSoftLink.h"
49 #import <pal/cocoa/AVFoundationSoftLink.h>
50 #import <pal/cf/CoreMediaSoftLink.h>
51
52 using namespace WebCore;
53 using namespace PAL;
54
// Objective-C adapter that forwards AVFoundation callbacks (sample-buffer
// delegate, KVO on "running"/"suspended", NSNotifications) to the C++
// AVVideoCaptureSource. m_callback is a raw pointer: the source calls
// -disconnect in its destructor to null it out before being freed.
55 @interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
56     AVVideoCaptureSource* m_callback;
57 }
58
59 -(id)initWithCallback:(AVVideoCaptureSource*)callback;
60 -(void)disconnect;
61 -(void)addNotificationObservers;
62 -(void)removeNotificationObservers;
63 -(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection;
// NOTE(review): keyPath is declared without an explicit type (implicitly id);
// NSObject declares it as (NSString *). Harmless but worth confirming/tidying.
64 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
65 #if PLATFORM(IOS_FAMILY)
66 -(void)sessionRuntimeError:(NSNotification*)notification;
67 -(void)beginSessionInterrupted:(NSNotification*)notification;
68 -(void)endSessionInterrupted:(NSNotification*)notification;
69 -(void)deviceConnectedDidChange:(NSNotification*)notification;
70 #endif
71 @end
72
73 namespace WebCore {
74
// Pixel format requested from the capture pipeline. macOS is pinned to
// planar 4:2:0 pending the radar referenced below; other platforms use the
// shared preferred format helper.
75 static inline OSType avVideoCapturePixelBufferFormat()
76 {
77     // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
78 #if PLATFORM(MAC)
79     return kCVPixelFormatType_420YpCbCr8Planar;
80 #else
81     return preferedPixelBufferFormat();
82 #endif
83 }
84
// Process-wide serial queue on which video sample buffers are delivered,
// created once and targeted at the high-priority global queue.
// NOTE(review): "globa" appears to be a typo for "global"; renaming would
// also require updating the caller in setupCaptureSession().
85 static dispatch_queue_t globaVideoCaptureSerialQueue()
86 {
87     static dispatch_queue_t globalQueue;
88     static dispatch_once_t onceToken;
89     dispatch_once(&onceToken, ^{
90         globalQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
91     });
92     return globalQueue;
93 }
94
// VideoPreset subclass that retains the AVCaptureDeviceFormat backing the
// preset so setSessionSizeAndFrameRate() can later apply it with
// -[AVCaptureDevice setActiveFormat:].
95 class AVVideoPreset : public VideoPreset {
96 public:
97     static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormat* format)
98     {
99         return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
100     }
101
102     AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormat* format)
103         : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
104         , format(format)
105     {
106     }
107
108     RetainPtr<AVCaptureDeviceFormat> format;
109 };
110
// Factory: looks up the capture device by unique ID, wraps it in a source,
// and applies the optional constraints. Returns an empty result when the
// device does not exist, or the bad constraint name when constraints fail.
111 CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
112 {
113     AVCaptureDevice *device = [PAL::getAVCaptureDeviceClass() deviceWithUniqueID:id];
114     if (!device)
115         return { };
116
117     auto source = adoptRef(*new AVVideoCaptureSource(device, WTFMove(id), WTFMove(hashSalt)));
118     if (constraints) {
119         auto result = source->applyConstraints(*constraints);
120         if (result)
121             return WTFMove(result.value().badConstraint);
122     }
123
124     return CaptureSourceOrError(WTFMove(source));
125 }
126
// Constructor: creates the ObjC observer and starts KVO on the device's
// "suspended" property (balanced by removeObserver in the destructor).
127 AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDevice* device, String&& id, String&& hashSalt)
128     : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
129     , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
130     , m_device(device)
131 {
132 #if PLATFORM(IOS_FAMILY)
    // These asserts let InterruptionReason values be cast directly from the
    // AVCaptureSessionInterruptionReason userInfo value (see
    // captureSessionBeginInterruption).
133     static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
134     static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
135     static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
136     static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
137 #endif
138
139     [m_device.get() addObserver:m_objcObserver.get() forKeyPath:@"suspended" options:NSKeyValueObservingOptionNew context:(void *)nil];
140 }
141
// Destructor: detaches the ObjC observer (clearing its raw back-pointer),
// removes the "suspended" KVO registration added in the constructor, then
// stops and tears down the session if one was ever created.
142 AVVideoCaptureSource::~AVVideoCaptureSource()
143 {
144 #if PLATFORM(IOS_FAMILY)
145     RealtimeMediaSourceCenter::singleton().videoCaptureFactory().unsetActiveSource(*this);
146 #endif
147     [m_objcObserver disconnect];
148     [m_device removeObserver:m_objcObserver.get() forKeyPath:@"suspended"];
149
150     if (!m_session)
151         return;
152
153     if ([m_session isRunning])
154         [m_session stopRunning];
155
156     clearSession();
157 }
158
// Drops the capture session after removing the "running" KVO registration
// added in setupSession(). Callers must ensure the session exists.
159 void AVVideoCaptureSource::clearSession()
160 {
161     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
162     ASSERT(m_session);
163     [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
164     m_session = nullptr;
165 }
166
// Starts capture, lazily building the session on first use. No-op if the
// session is already running.
167 void AVVideoCaptureSource::startProducingData()
168 {
169     if (!m_session) {
170         if (!setupSession())
171             return;
172     }
173
174     if ([m_session isRunning])
175         return;
176
177     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
178     [m_objcObserver addNotificationObservers];
179     [m_session startRunning];
180 }
181
// Stops capture and clears any interruption state. On iOS the session is
// also destroyed (it is rebuilt lazily by startProducingData()).
182 void AVVideoCaptureSource::stopProducingData()
183 {
184     if (!m_session)
185         return;
186
187     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
188     [m_objcObserver removeNotificationObservers];
189
190     if ([m_session isRunning])
191         [m_session stopRunning];
192
193     m_interruption = InterruptionReason::None;
194 #if PLATFORM(IOS_FAMILY)
195     clearSession();
196 #endif
197 }
198
// Opens an AVCaptureSession configuration transaction, if a session exists.
199 void AVVideoCaptureSource::beginConfiguration()
200 {
201     if (m_session)
202         [m_session beginConfiguration];
203 }
204
// Commits a configuration transaction opened by beginConfiguration().
205 void AVVideoCaptureSource::commitConfiguration()
206 {
207     if (m_session)
208         [m_session commitConfiguration];
209 }
210
// Invalidates the cached settings so the next settings() call recomputes them.
211 void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
212 {
213     m_currentSettings = WTF::nullopt;
214 }
215
// Returns the current MediaStreamTrack settings, computing and caching them
// on demand. The cache is invalidated by settingsDidChange().
216 const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
217 {
218     if (m_currentSettings)
219         return *m_currentSettings;
220
221     RealtimeMediaSourceSettings settings;
222     if ([device() position] == AVCaptureDevicePositionFront)
223         settings.setFacingMode(RealtimeMediaSourceSettings::User);
224     else if ([device() position] == AVCaptureDevicePositionBack)
225         settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
226     else
227         settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
228
229     settings.setFrameRate(frameRate());
230
    // A 90°/270° sample rotation swaps the reported width and height.
231     auto size = this->size();
232     if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
233         size = size.transposedSize();
234     
235     settings.setWidth(size.width());
236     settings.setHeight(size.height());
237     settings.setDeviceId(hashedId());
238
239     RealtimeMediaSourceSupportedConstraints supportedConstraints;
240     supportedConstraints.setSupportsDeviceId(true);
241     supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
242     supportedConstraints.setSupportsWidth(true);
243     supportedConstraints.setSupportsHeight(true);
244     supportedConstraints.setSupportsAspectRatio(true);
245     supportedConstraints.setSupportsFrameRate(true);
246
247     settings.setSupportedConstraints(supportedConstraints);
248
249     m_currentSettings = WTFMove(settings);
250
251     return *m_currentSettings;
252 }
253
// Returns the device capabilities, computing and caching them on demand.
// Facing mode is derived from the hardware position; size/frame-rate ranges
// are filled in by the base class via updateCapabilities().
254 const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
255 {
256     if (m_capabilities)
257         return *m_capabilities;
258
259     RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
260     capabilities.setDeviceId(hashedId());
261
262     AVCaptureDevice *videoDevice = device();
263     if ([videoDevice position] == AVCaptureDevicePositionFront)
264         capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
265     if ([videoDevice position] == AVCaptureDevicePositionBack)
266         capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);
267
268     updateCapabilities(capabilities);
269
270     m_capabilities = WTFMove(capabilities);
271
272     return *m_capabilities;
273 }
274
// On iOS, prefer binned formats (lower power/noise); elsewhere any preset is
// acceptable. The trailing "return true" is the non-iOS path — on iOS the
// #if branch returns first.
275 bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
276 {
277 #if PLATFORM(IOS_FAMILY)
278     return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
279 #else
280     UNUSED_PARAM(preset);
281 #endif
282
283     return true;
284 }
285
// Records the requested preset/frame rate and applies them to the session
// (a no-op inside setSessionSizeAndFrameRate() until a session exists).
286 void AVVideoCaptureSource::setFrameRateWithPreset(double requestedFrameRate, RefPtr<VideoPreset> preset)
287 {
288     auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;
289     m_currentPreset = avPreset;
290     m_currentFrameRate = requestedFrameRate;
291
292     setSessionSizeAndFrameRate();
293 }
294
// Applies m_currentPreset and m_currentFrameRate to the device inside a
// session configuration transaction. The device is locked for configuration;
// @try/@catch guards against AVFoundation throwing on invalid format/frame
// duration so the lock and transaction are still released.
295 void AVVideoCaptureSource::setSessionSizeAndFrameRate()
296 {
297     if (!m_session)
298         return;
299
300     auto* avPreset = m_currentPreset.get();
301     if (!avPreset)
302         return;
303
304     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, SizeAndFrameRate { m_currentPreset->size.width(), m_currentPreset->size.height(), m_currentFrameRate });
305
306     ASSERT(avPreset->format);
307
308     NSError *error = nil;
309     [m_session beginConfiguration];
310     @try {
311         if ([device() lockForConfiguration:&error]) {
312             ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, "setting preset to ", m_currentSize);
313             [device() setActiveFormat:avPreset->format.get()];
314
315 #if PLATFORM(MAC)
            // On Mac the output is asked to scale/convert to the preset size;
            // an empty IOSurface properties dictionary requests IOSurface backing.
316             auto settingsDictionary = @{
317                 (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
318                 (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
319                 (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height()),
320                 (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ }
321             };
322             [m_videoOutput setVideoSettings:settingsDictionary];
323 #endif
324
325             auto* frameRateRange = frameDurationForFrameRate(m_currentFrameRate);
326             ASSERT(frameRateRange);
327             if (frameRateRange) {
328                 m_currentFrameRate = clampTo(m_currentFrameRate, frameRateRange.minFrameRate, frameRateRange.maxFrameRate);
329
                // Clamp the requested duration into the range actually
                // supported by the active format.
330                 auto frameDuration = CMTimeMake(1, m_currentFrameRate);
331                 if (CMTimeCompare(frameDuration, frameRateRange.minFrameDuration) < 0)
332                     frameDuration = frameRateRange.minFrameDuration;
333                 else if (CMTimeCompare(frameDuration, frameRateRange.maxFrameDuration) > 0)
334                     frameDuration = frameRateRange.maxFrameDuration;
335
336                 ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, "setting frame rate to ", m_currentFrameRate, ", duration ", PAL::toMediaTime(frameDuration));
337
338                 [device() setActiveVideoMinFrameDuration: frameDuration];
339                 [device() setActiveVideoMaxFrameDuration: frameDuration];
340             } else
341                 ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "cannot find proper frame rate range for the selected preset\n");
342
343             [device() unlockForConfiguration];
344         }
345     } @catch(NSException *exception) {
346         ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "error configuring device ", [[exception name] UTF8String], ", reason : ", [[exception reason] UTF8String]);
347         [device() unlockForConfiguration];
348         ASSERT_NOT_REACHED();
349     }
350     [m_session commitConfiguration];
351
352     ERROR_LOG_IF(error && loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);
353 }
354
// Maps an AVCaptureVideoOrientation to a sensor rotation in degrees
// (0, ±90, 180). The mapping differs between iOS-family devices and Mac
// because their sensors are mounted differently.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // AVCaptureVideoOrientation is an NSInteger-backed enum, so a value
    // outside the four declared cases previously fell off the end of this
    // non-void function (undefined behavior). Treat it as "no rotation".
    ASSERT_NOT_REACHED();
    return 0;
}
381
// Reads the sensor orientation from the output's video connection, or 0 when
// no video connection exists.
382 static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutput* videoOutput)
383 {
384     AVCaptureConnection* connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
385     return connection ? sensorOrientation([connection videoOrientation]) : 0;
386 }
387
// Creates the AVCaptureSession, registers KVO on "running" (removed by
// clearSession()), and configures inputs/outputs inside a configuration
// transaction. Reports captureFailed() to observers on failure.
388 bool AVVideoCaptureSource::setupSession()
389 {
390     if (m_session)
391         return true;
392
393     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
394
395     m_session = adoptNS([PAL::allocAVCaptureSessionInstance() init]);
396     [m_session addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:(void *)nil];
397
398     [m_session beginConfiguration];
399     bool success = setupCaptureSession();
400     [m_session commitConfiguration];
401
402     if (!success)
403         captureFailed();
404
405     return success;
406 }
407
// Picks the supported frame-rate range of the active format that contains
// `rate`, preferring the one with the largest minimum frame duration (i.e.
// the narrowest/lowest-rate range that still covers the request). Returns
// nil (and logs) when no range contains the rate.
408 AVFrameRateRange* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
409 {
410     AVFrameRateRange *bestFrameRateRange = nil;
411     for (AVFrameRateRange *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
412         if (frameRateRangeIncludesRate({ [frameRateRange minFrameRate], [frameRateRange maxFrameRate] }, rate)) {
413             if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
414                 bestFrameRateRange = frameRateRange;
415         }
416     }
417
418     if (!bestFrameRateRange)
419         ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "no frame rate range for rate ", rate);
420
421     return bestFrameRateRange;
422 }
423
// Wires the device input and video-data output into the session, sets the
// pixel format and sample-buffer delegate, applies the current preset, and
// computes the initial sample rotation. Returns false on any setup failure;
// the caller (setupSession) reports captureFailed().
424 bool AVVideoCaptureSource::setupCaptureSession()
425 {
426     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
427
428 #if PLATFORM(IOS_FAMILY)
429     RealtimeMediaSourceCenter::singleton().videoCaptureFactory().setActiveSource(*this);
430 #endif
431
432     NSError *error = nil;
433     RetainPtr<AVCaptureDeviceInput> videoIn = adoptNS([PAL::allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
434     if (error) {
435         ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "failed to allocate AVCaptureDeviceInput ", [[error localizedDescription] UTF8String]);
436         return false;
437     }
438
439     if (![session() canAddInput:videoIn.get()]) {
440         ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video input device");
441         return false;
442     }
443     [session() addInput:videoIn.get()];
444
445     m_videoOutput = adoptNS([PAL::allocAVCaptureVideoDataOutputInstance() init]);
446     auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);
447
448     [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Dropping late frames keeps latency bounded at the cost of frames.
449     [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
450     [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];
451
452     if (![session() canAddOutput:m_videoOutput.get()]) {
453         ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video output device");
454         return false;
455     }
456     [session() addOutput:m_videoOutput.get()];
457
458     setSessionSizeAndFrameRate();
459
460     m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
461     computeSampleRotation();
462
463     return true;
464 }
465
// Releases the last delivered sample; the session itself is torn down
// elsewhere (stopProducingData / destructor).
466 void AVVideoCaptureSource::shutdownCaptureSession()
467 {
468     m_buffer = nullptr;
469 }
470
// iOS only: subscribes to device-orientation changes and seeds the current
// orientation immediately. No-op on other platforms.
471 void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
472 {
473 #if PLATFORM(IOS_FAMILY)
474     notifier.addObserver(*this);
475     orientationChanged(notifier.orientation());
476 #else
477     UNUSED_PARAM(notifier);
478 #endif
479 }
480
// OrientationNotifier callback: records the device orientation (degrees) and
// recomputes the sample rotation.
481 void AVVideoCaptureSource::orientationChanged(int orientation)
482 {
483     ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
484     m_deviceOrientation = orientation;
485     computeSampleRotation();
486 }
487
// Derives the per-sample rotation from sensor vs. device orientation. Front
// cameras mirror, so 90°/-90° swap Left/Right relative to back cameras.
// Notifies settings observers (width/height swap) only when the value changes.
488 void AVVideoCaptureSource::computeSampleRotation()
489 {
490     bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
491     MediaSample::VideoRotation sampleRotation;
492     switch (m_sensorOrientation - m_deviceOrientation) {
493     case 0:
494         sampleRotation = MediaSample::VideoRotation::None;
495         break;
496     case 180:
497     case -180:
498         sampleRotation = MediaSample::VideoRotation::UpsideDown;
499         break;
500     case 90:
501         sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
502         break;
503     case -90:
504     case -270:
505         sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
506         break;
507     default:
508         ASSERT_NOT_REACHED();
509         sampleRotation = MediaSample::VideoRotation::None;
510     }
511     if (sampleRotation == m_sampleRotation)
512         return;
513
514     m_sampleRotation = sampleRotation;
515     notifySettingsDidChangeObservers({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height });
516 }
517
// Main-side handler for a captured frame: retains it, updates the intrinsic
// size from the sample, and fans it out to observers. Dropped when the
// source is stopped or muted.
518 void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
519 {
520     if (!isProducingData() || muted())
521         return;
522
523     m_buffer = &sample.get();
524     setIntrinsicSize(expandedIntSize(sample->presentationSize()));
525     dispatchMediaSampleToObservers(WTFMove(sample));
526 }
527
// Capture-queue delegate entry point. Skips the first few frames after
// startup (m_framesToDropAtStartup, set when the session starts running),
// then wraps the buffer and hops to the main thread via scheduleDeferredTask.
528 void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef sampleBuffer, AVCaptureConnection* captureConnection)
529 {
530     if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
531         return;
532
533     auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
534     scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
535         processNewFrame(WTFMove(sample));
536     });
537 }
538
// KVO("running") handler, marshalled to the main thread. Mirrors the running
// state into the muted state and arms the startup frame-drop counter when the
// session (re)starts.
539 void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
540 {
541     scheduleDeferredTask([this, state] {
542         ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, state);
        // Nothing to do when both the running flag and the muted state
        // already agree with the new value.
543         if ((state == m_isRunning) && (state == !muted()))
544             return;
545
546         m_isRunning = state;
547         if (m_isRunning)
548             m_framesToDropAtStartup = 4;
549
550         notifyMutedChange(!m_isRunning);
551     });
552 }
553
// KVO("suspended") handler (non-iOS only): reflects device suspension — e.g.
// a closed laptop lid — into the muted state on the main thread.
554 void AVVideoCaptureSource::captureDeviceSuspendedDidChange()
555 {
556 #if !PLATFORM(IOS_FAMILY)
557     scheduleDeferredTask([this] {
558         auto isSuspended = [m_device isSuspended];
559         ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, !!isSuspended);
560         if (isSuspended == muted())
561             return;
562
563         notifyMutedChange(isSuspended);
564     });
565 #endif
566 }
567
// A session interruption (iOS) counts as interrupted in addition to whatever
// the base class reports.
568 bool AVVideoCaptureSource::interrupted() const
569 {
570     if (m_interruption != InterruptionReason::None)
571         return true;
572
573     return RealtimeMediaSource::interrupted();
574 }
575
// Builds one AVVideoPreset per distinct capture size the device supports,
// collecting the frame-rate ranges of the first format seen at each size
// (later formats with a duplicate size are skipped).
576 void AVVideoCaptureSource::generatePresets()
577 {
578     Vector<Ref<VideoPreset>> presets;
579     for (AVCaptureDeviceFormat* format in [device() formats]) {
580
581         CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
582         IntSize size = { dimensions.width, dimensions.height };
583         auto index = presets.findMatching([&size](auto& preset) {
584             return size == preset->size;
585         });
586         if (index != notFound)
587             continue;
588
589         Vector<FrameRateRange> frameRates;
590         for (AVFrameRateRange* range in [format videoSupportedFrameRateRanges])
591             frameRates.append({ range.minFrameRate, range.maxFrameRate});
592
593         presets.append(AVVideoPreset::create(size, WTFMove(frameRates), format));
594     }
595
596     setSupportedPresets(WTFMove(presets));
597 }
598
599 #if PLATFORM(IOS_FAMILY)
// iOS runtime-error handler: when media services were reset while we were
// running, attempt exactly one restart (m_isRunning reflects the result so a
// failed restart is not retried).
600 void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
601 {
602     ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);
603
604     if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
605         return;
606
607     // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
608     [m_session startRunning];
609     m_isRunning = [m_session isRunning];
610 }
611
// Records why the session was interrupted. The direct cast relies on the
// static_asserts in the constructor keeping InterruptionReason aligned with
// AVCaptureSessionInterruptionReason.
612 void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
613 {
614     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, [notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
615     m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
616 }
617
// Clears the interruption state; only the side-by-side-multitasking case
// warrants restarting the session ourselves (other reasons either resume on
// their own or the session is gone/already running).
618 void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
619 {
620     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
621
622     InterruptionReason reason = m_interruption;
623
624     m_interruption = InterruptionReason::None;
625     if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
626         return;
627
628     [m_session startRunning];
629     m_isRunning = [m_session isRunning];
630 }
631 #endif
632
// Device-disconnected notification handler: fail the capture only if the
// disconnected device is ours.
633 void AVVideoCaptureSource::deviceDisconnected(RetainPtr<NSNotification> notification)
634 {
635     ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
636     if (this->device() == [notification object])
637         captureFailed();
638 }
639
640
641 } // namespace WebCore
642
643 @implementation WebCoreAVVideoCaptureSourceObserver
644
// Stores the C++ source as a raw back-pointer; cleared by -disconnect.
645 - (id)initWithCallback:(AVVideoCaptureSource*)callback
646 {
647     self = [super init];
648     if (!self)
649         return nil;
650
651     m_callback = callback;
652
653     return self;
654 }
655
// Severs the link to the C++ source (called from its destructor): cancels
// pending perform requests, removes notification observers, and nulls the
// raw back-pointer so later callbacks become no-ops.
656 - (void)disconnect
657 {
658     [NSObject cancelPreviousPerformRequestsWithTarget:self];
659     [self removeNotificationObservers];
660     m_callback = nullptr;
661 }
662
// Subscribes to device-disconnect notifications (all platforms) and, on iOS,
// to session runtime-error and interruption begin/end notifications scoped
// to our session. Balanced by -removeNotificationObservers.
663 - (void)addNotificationObservers
664 {
665     ASSERT(m_callback);
666
667     NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
668
669     [center addObserver:self selector:@selector(deviceConnectedDidChange:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
670
671 #if PLATFORM(IOS_FAMILY)
672     AVCaptureSession* session = m_callback->session();
673     [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
674     [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
675     [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
676 #endif
677 }
678
// Unsubscribes this object from all notifications in one call.
679 - (void)removeNotificationObservers
680 {
681     [[NSNotificationCenter defaultCenter] removeObserver:self];
682 }
683
// AVCaptureVideoDataOutputSampleBufferDelegate: runs on the capture queue;
// forwards to the C++ source unless it has already disconnected.
684 - (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
685 {
686     if (!m_callback)
687         return;
688
689     m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
690 }
691
// KVO dispatch for the two key paths this class registers: the session's
// "running" and the device's "suspended". "prior" notifications are ignored;
// only the post-change callback is forwarded.
692 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
693 {
694     UNUSED_PARAM(object);
695     UNUSED_PARAM(context);
696
697     if (!m_callback)
698         return;
699
700     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
701
702     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
703
704 #if !RELEASE_LOG_DISABLED
705     if (m_callback->loggerPtr() && m_callback->logger().willLog(m_callback->logChannel(), WTFLogLevel::Debug)) {
706         auto identifier = Logger::LogSiteIdentifier("AVVideoCaptureSource", "observeValueForKeyPath", m_callback->logIdentifier());
707
708         RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
709         m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
710     }
711 #endif
712
713     if (!willChange && [keyPath isEqualToString:@"running"])
714         m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
715     if (!willChange && [keyPath isEqualToString:@"suspended"])
716         m_callback->captureDeviceSuspendedDidChange();
717 }
718
// Forwards AVCaptureDeviceWasDisconnectedNotification to the C++ source
// (which checks whether the disconnected device is its own).
719 - (void)deviceConnectedDidChange:(NSNotification*)notification
720 {
721     if (m_callback)
722         m_callback->deviceDisconnected(notification);
723 }
724
725 #if PLATFORM(IOS_FAMILY)
// iOS: extracts the NSError from the runtime-error notification and forwards it.
726 - (void)sessionRuntimeError:(NSNotification*)notification
727 {
728     NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
729     if (m_callback)
730         m_callback->captureSessionRuntimeError(error);
731 }
732
// iOS: forwards session-interruption-began notifications.
733 - (void)beginSessionInterrupted:(NSNotification*)notification
734 {
735     if (m_callback)
736         m_callback->captureSessionBeginInterruption(notification);
737 }
738
// iOS: forwards session-interruption-ended notifications.
739 - (void)endSessionInterrupted:(NSNotification*)notification
740 {
741     if (m_callback)
742         m_callback->captureSessionEndInterruption(notification);
743 }
744 #endif
745
746 @end
747
748 #endif // ENABLE(MEDIA_STREAM)