Changing settings of a MediaStreamTrack clone should not alter the settings of the...
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenter.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoSource.h"
41 #import "RealtimeVideoUtilities.h"
42 #import <AVFoundation/AVCaptureDevice.h>
43 #import <AVFoundation/AVCaptureInput.h>
44 #import <AVFoundation/AVCaptureOutput.h>
45 #import <AVFoundation/AVCaptureSession.h>
46 #import <AVFoundation/AVError.h>
47 #import <objc/runtime.h>
48
49 #import "CoreVideoSoftLink.h"
50 #import <pal/cocoa/AVFoundationSoftLink.h>
51 #import <pal/cf/CoreMediaSoftLink.h>
52
53 using namespace WebCore;
54 using namespace PAL;
55
// Objective-C helper that bridges AVFoundation callbacks (sample buffer delegate,
// KVO, NSNotification) into the C++ AVVideoCaptureSource. It holds a raw
// back-pointer that must be cleared with -disconnect before the source dies.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
// Session-level error/interruption notifications are only wired up on iOS.
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
-(void)deviceConnectedDidChange:(NSNotification*)notification;
#endif
@end
73
74 namespace WebCore {
75
// Pixel format requested from the capture pipeline.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if PLATFORM(MAC)
    // macOS workaround: force planar 4:2:0 instead of the preferred format.
    return kCVPixelFormatType_420YpCbCr8Planar;
#else
    return preferedPixelBufferFormat();
#endif
}
85
// Shared serial queue on which all video sample-buffer callbacks are delivered.
// (Name spelling is historical; callers in this file depend on it.)
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t sharedQueue;
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{
        sharedQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return sharedQueue;
}
95
// VideoPreset specialization that additionally retains the AVCaptureDeviceFormat
// backing the preset, so it can later be applied via -[AVCaptureDevice setActiveFormat:].
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormat* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormat* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // Device format selected when this preset becomes active (see setSessionSizeAndFrameRate()).
    RetainPtr<AVCaptureDeviceFormat> format;
};
111
// Creates a capture source for the device with the given unique ID, optionally
// applying initial constraints. Returns an empty result when the device is
// unknown, or the failing constraint's name when constraints cannot be satisfied.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDevice *avDevice = [PAL::getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!avDevice)
        return { };

    auto captureSource = adoptRef(*new AVVideoCaptureSource(avDevice, WTFMove(id), WTFMove(hashSalt)));
    if (constraints) {
        if (auto result = captureSource->applyConstraints(*constraints))
            return WTFMove(result->badConstraint);
    }

    return CaptureSourceOrError(RealtimeVideoSource::create(WTFMove(captureSource)));
}
127
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDevice* device, String&& id, String&& hashSalt)
    : RealtimeVideoCaptureSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // InterruptionReason values must stay numerically identical to the AVFoundation
    // interruption reasons so the raw notification payload can be static_cast directly
    // in captureSessionBeginInterruption().
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif

    // Observe the device's "suspended" state; changes feed captureDeviceSuspendedDidChange().
    [m_device.get() addObserver:m_objcObserver.get() forKeyPath:@"suspended" options:NSKeyValueObservingOptionNew context:(void *)nil];
}
142
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().unsetActiveSource(*this);
#endif
    // Detach the Objective-C observer first so no callback can reach a
    // half-destroyed object, then balance the "suspended" KVO registration.
    [m_objcObserver disconnect];
    [m_device removeObserver:m_objcObserver.get() forKeyPath:@"suspended"];

    if (!m_session)
        return;

    if ([m_session isRunning])
        [m_session stopRunning];

    clearSession();
}
159
// Removes the "running" KVO registration and releases the capture session.
// Must only be called while a session exists.
void AVVideoCaptureSource::clearSession()
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    ASSERT(m_session);
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    m_session = nullptr;
}
167
// Starts capture, lazily building the session on first use. A no-op when the
// session is already running.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
182
// Stops capture and clears any pending interruption state.
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    // On iOS the session is torn down completely; startProducingData() rebuilds it.
    clearSession();
#endif
}
199
// Opens an AVCaptureSession configuration transaction, if a session exists.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;

    [m_session beginConfiguration];
}
205
// Commits a previously opened AVCaptureSession configuration transaction.
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;

    [m_session commitConfiguration];
}
211
// Invalidates the cached settings; they are recomputed lazily in settings().
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = WTF::nullopt;
}
216
// Returns the current settings, computing and caching them on first access.
// The cache is invalidated by settingsDidChange().
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings settings;
    // Map the AVFoundation device position onto the MediaStream facing mode.
    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(frameRate());

    // A 90-degree sample rotation swaps the reported width and height.
    auto size = this->size();
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        size = size.transposedSize();

    settings.setWidth(size.width());
    settings.setHeight(size.height());
    settings.setDeviceId(hashedId());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    // Facing mode is only constrainable when the device reports a position.
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    settings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(settings);

    return *m_currentSettings;
}
254
// Returns the device capabilities, computing and caching them on first access.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(hashedId());

    // Advertise the facing mode(s) matching the physical camera position.
    AVCaptureDevice *videoDevice = device();
    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    // Base class fills in size/frame-rate ranges from the supported presets.
    updateCapabilities(capabilities);

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}
275
// On iOS, prefer presets whose format is video-binned; elsewhere any preset is fine.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}
286
// Records the requested preset and frame rate, then pushes them to the device.
void AVVideoCaptureSource::setFrameRateWithPreset(double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    m_currentFrameRate = requestedFrameRate;
    if (preset)
        m_currentPreset = downcast<AVVideoPreset>(preset.get());
    else
        m_currentPreset = nullptr;

    setSessionSizeAndFrameRate();
}
295
// Applies m_currentPreset / m_currentFrameRate to the capture device inside a
// session configuration transaction. The device must be locked for configuration
// while its active format and frame durations are changed.
void AVVideoCaptureSource::setSessionSizeAndFrameRate()
{
    if (!m_session)
        return;

    auto* avPreset = m_currentPreset.get();
    if (!avPreset)
        return;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, SizeAndFrameRate { m_currentPreset->size.width(), m_currentPreset->size.height(), m_currentFrameRate });

    ASSERT(avPreset->format);

    NSError *error = nil;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            // NOTE(review): this logs m_currentSize rather than avPreset->size —
            // confirm m_currentSize is kept current before this point.
            ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, "setting preset to ", m_currentSize);
            [device() setActiveFormat:avPreset->format.get()];

#if PLATFORM(MAC)
            // On macOS the video output's pixel format and dimensions must also be
            // set explicitly to match the preset.
            auto settingsDictionary = @{
                (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height()),
                (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ }
            };
            [m_videoOutput setVideoSettings:settingsDictionary];
#endif

            auto* frameRateRange = frameDurationForFrameRate(m_currentFrameRate);
            ASSERT(frameRateRange);
            if (frameRateRange) {
                // Clamp the requested rate into the supported range, then clamp the
                // derived frame duration to the range's duration limits as well.
                m_currentFrameRate = clampTo(m_currentFrameRate, frameRateRange.minFrameRate, frameRateRange.maxFrameRate);

                auto frameDuration = CMTimeMake(1, m_currentFrameRate);
                if (CMTimeCompare(frameDuration, frameRateRange.minFrameDuration) < 0)
                    frameDuration = frameRateRange.minFrameDuration;
                else if (CMTimeCompare(frameDuration, frameRateRange.maxFrameDuration) > 0)
                    frameDuration = frameRateRange.maxFrameDuration;

                ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, "setting frame rate to ", m_currentFrameRate, ", duration ", PAL::toMediaTime(frameDuration));

                // A fixed frame rate is requested by setting min and max durations equal.
                [device() setActiveVideoMinFrameDuration: frameDuration];
                [device() setActiveVideoMaxFrameDuration: frameDuration];
            } else
                ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "cannot find proper frame rate range for the selected preset\n");

            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        // Configuration can throw; release the lock and treat this as a programming error.
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "error configuring device ", [[exception name] UTF8String], ", reason : ", [[exception reason] UTF8String]);
        [device() unlockForConfiguration];
        ASSERT_NOT_REACHED();
    }
    [m_session commitConfiguration];

    ERROR_LOG_IF(error && loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);
}
355
// Maps an AVCaptureVideoOrientation to a sensor rotation in degrees. The iOS and
// macOS mappings differ (the iOS sensor is mounted rotated relative to portrait).
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // An out-of-range enum value would previously fall off the end of this
    // non-void function (undefined behavior). Treat it as unrotated instead,
    // without adding a default: case that would defeat -Wswitch exhaustiveness.
    ASSERT_NOT_REACHED();
    return 0;
}
382
// Queries the output's video connection for its orientation; reports 0 when no
// video connection exists.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutput* videoOutput)
{
    AVCaptureConnection* videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection)
        return 0;

    return sensorOrientation([videoConnection videoOrientation]);
}
388
// Creates the AVCaptureSession, registers KVO on its "running" state, and runs
// the one-time capture configuration inside a configuration transaction.
// Reports capture failure (and returns false) when configuration fails.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

    m_session = adoptNS([PAL::allocAVCaptureSessionInstance() init]);
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
408
// Finds a supported frame-rate range of the active format containing |rate|.
// Among matching ranges, the one with the largest minimum frame duration is
// preferred. Returns nil (after logging) when no range contains the rate.
AVFrameRateRange* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRange *bestFrameRateRange = nil;
    for (AVFrameRateRange *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (frameRateRangeIncludesRate({ [frameRateRange minFrameRate], [frameRateRange maxFrameRate] }, rate)) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange)
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "no frame rate range for rate ", rate);

    return bestFrameRateRange;
}
424
// One-time session wiring: adds the device input and a video data output using
// our preferred pixel format, applies the current size/frame-rate preset, and
// caches the sensor orientation. Returns false on any setup failure.
bool AVVideoCaptureSource::setupCaptureSession()
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInput> videoIn = adoptNS([PAL::allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "failed to allocate AVCaptureDeviceInput ", [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video input device");
        return false;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([PAL::allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Late frames are dropped rather than queued; samples are delivered on the
    // shared capture queue to the Objective-C observer.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video output device");
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    setSessionSizeAndFrameRate();

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
466
// Releases the last retained sample; session teardown happens elsewhere
// (stopProducingData / the destructor).
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
}
471
// On iOS, registers for device-orientation updates and applies the current
// orientation immediately; a no-op elsewhere.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
481
// Orientation-notifier callback; orientation is in degrees (0, ±90, 180).
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
488
// Derives the rotation needed to display samples upright from the difference
// between sensor orientation and device orientation. For the (mirrored) front
// camera the 90-degree rotations are swapped. Notifies observers when the
// rotation change affects the reported width/height.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    MediaSample::VideoRotation sampleRotation;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case -270:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        sampleRotation = MediaSample::VideoRotation::None;
    }
    if (sampleRotation == m_sampleRotation)
        return;

    m_sampleRotation = sampleRotation;
    // Rotation by 90 degrees swaps the dimensions reported by settings().
    notifySettingsDidChangeObservers({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height });
}
518
// Delivers a captured sample to observers, updating the intrinsic size first.
// Samples are dropped while the source is stopped or muted.
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    if (!isProducingData() || muted())
        return;

    m_buffer = &sample.get();
    setIntrinsicSize(expandedIntSize(sample->presentationSize()));
    dispatchMediaSampleToObservers(WTFMove(sample));
}
528
// Capture-queue callback for each sample buffer. The first few frames after
// startup are discarded (see m_framesToDropAtStartup); remaining samples are
// wrapped with the current rotation/mirroring and processed on a deferred task.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef sampleBuffer, AVCaptureConnection* captureConnection)
{
    if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
        return;

    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
539
// KVO callback for the session's "running" state; work is deferred via
// scheduleDeferredTask. The running state is mirrored into the muted state.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, state);
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        if (m_isRunning)
            m_framesToDropAtStartup = 4; // Discard the first frames after each (re)start.

        notifyMutedChange(!m_isRunning);
    });
}
554
// KVO callback for the device's "suspended" state; suspension is surfaced as
// muted. Compiled out on iOS, where interruptions arrive via session notifications.
void AVVideoCaptureSource::captureDeviceSuspendedDidChange()
{
#if !PLATFORM(IOS_FAMILY)
    scheduleDeferredTask([this] {
        auto isSuspended = [m_device isSuspended];
        ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, !!isSuspended);
        if (isSuspended == muted())
            return;

        notifyMutedChange(isSuspended);
    });
#endif
}
568
// The source is interrupted either by a recorded AVFoundation interruption or
// by whatever the base class considers an interruption.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
576
// Builds the supported-preset list: one preset per unique frame size, keeping
// the first device format seen for each size along with its frame-rate ranges.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormat* format in [device() formats]) {

        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize size = { dimensions.width, dimensions.height };
        // Skip formats whose size is already represented.
        auto index = presets.findMatching([&size](auto& preset) {
            return size == preset->size;
        });
        if (index != notFound)
            continue;

        Vector<FrameRateRange> frameRates;
        for (AVFrameRateRange* range in [format videoSupportedFrameRateRanges])
            frameRates.append({ range.minFrameRate, range.maxFrameRate});

        presets.append(AVVideoPreset::create(size, WTFMove(frameRates), format));
    }

    setSupportedPresets(WTFMove(presets));
}
599
#if PLATFORM(IOS_FAMILY)
// Handles AVCaptureSessionRuntimeErrorNotification. Only a media-services reset
// while the session was running triggers a restart attempt.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);

    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
612
// Handles AVCaptureSessionWasInterruptedNotification. The raw reason value maps
// directly onto InterruptionReason (guaranteed by the static_asserts in the constructor).
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, [notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
618
// Handles AVCaptureSessionInterruptionEndedNotification. The session is only
// restarted automatically after a side-by-side interruption and only if it is
// not already running.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
#endif
633
// Notification handler for device disconnection; fails the capture only when
// the disconnected device is ours.
void AVVideoCaptureSource::deviceDisconnected(RetainPtr<NSNotification> notification)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    if (this->device() != [notification object])
        return;

    captureFailed();
}
640
641 } // namespace WebCore
642
// Objective-C adapter that forwards AVFoundation callbacks to the C++ source.
// Every forwarding method nil-checks m_callback because -disconnect may have
// detached the source already.
@implementation WebCoreAVVideoCaptureSourceObserver

- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;

    return self;
}

// Called from the source's destructor; afterwards no callback is forwarded.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}

- (void)addNotificationObservers
{
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];

    // Device disconnection is observed on all platforms.
    [center addObserver:self selector:@selector(deviceConnectedDidChange:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];

#if PLATFORM(IOS_FAMILY)
    // Session runtime errors and interruptions are only handled on iOS.
    AVCaptureSession* session = m_callback->session();
    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}

- (void)removeNotificationObservers
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Sample buffer delegate; runs on the shared capture queue.
- (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
    if (!m_callback)
        return;

    m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}

// KVO dispatch for the session's "running" and the device's "suspended" key paths.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    // Prior notifications (willChange) are ignored below; only the post-change value matters.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !RELEASE_LOG_DISABLED
    if (m_callback->loggerPtr() && m_callback->logger().willLog(m_callback->logChannel(), WTFLogLevel::Debug)) {
        auto identifier = Logger::LogSiteIdentifier("AVVideoCaptureSource", "observeValueForKeyPath", m_callback->logIdentifier());

        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
    }
#endif

    if (!willChange && [keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
    if (!willChange && [keyPath isEqualToString:@"suspended"])
        m_callback->captureDeviceSuspendedDidChange();
}

- (void)deviceConnectedDidChange:(NSNotification*)notification
{
    if (m_callback)
        m_callback->deviceDisconnected(notification);
}

#if PLATFORM(IOS_FAMILY)
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}

- (void)beginSessionInterrupted:(NSNotification*)notification
{
    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}

- (void)endSessionInterrupted:(NSNotification*)notification
{
    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
#endif

@end
747
748 #endif // ENABLE(MEDIA_STREAM)