[MediaStream] A stream's first video frame should be rendered
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenter.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
51 typedef AVCaptureConnection AVCaptureConnectionType;
52 typedef AVCaptureDevice AVCaptureDeviceTypedef;
53 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
54 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
55 typedef AVCaptureOutput AVCaptureOutputType;
56 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
57 typedef AVFrameRateRange AVFrameRateRangeType;
58 typedef AVCaptureSession AVCaptureSessionType;
59
60 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
61
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
66 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
67 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
68 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
69 SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
70
71 #define AVCaptureConnection getAVCaptureConnectionClass()
72 #define AVCaptureDevice getAVCaptureDeviceClass()
73 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
74 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
75 #define AVCaptureOutput getAVCaptureOutputClass()
76 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
77 #define AVFrameRateRange getAVFrameRateRangeClass()
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
80
81 #if PLATFORM(IOS_FAMILY)
82 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
83 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
84 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
85 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
86 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)
87
88 #define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
89 #define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
90 #define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
91 #define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
92 #define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
93 #endif
94
95 using namespace WebCore;
96 using namespace PAL;
97
// Bridges AVFoundation callbacks back into the C++ AVVideoCaptureSource:
// sample-buffer delivery (AVCaptureVideoDataOutputSampleBufferDelegate),
// KVO on the session's "running" and the device's "suspended" keys, and, on
// iOS, capture-session interruption/runtime-error notifications.
// m_callback is a raw back-pointer; it is cleared via -disconnect before the
// source is destroyed so late callbacks cannot touch a dead object.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
#endif
@end
114
115 namespace WebCore {
116
// Pixel format requested from AVFoundation for captured frames.
// FIXME: Use preferedPixelBufferFormat() on macOS as well once rdar://problem/44391444 is fixed.
static inline OSType avVideoCapturePixelBufferFormat()
{
#if !PLATFORM(MAC)
    return preferedPixelBufferFormat();
#else
    return kCVPixelFormatType_420YpCbCr8Planar;
#endif
}
126
// Returns the process-wide serial queue on which AVFoundation delivers video
// sample buffers. NOTE(review): the name is missing an "l" ("globa"), but it
// matches the call site in setupCaptureSession(), so the typo is preserved.
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t captureQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        captureQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return captureQueue;
}
136
// VideoPreset specialization that remembers the AVCaptureDeviceFormat it was
// built from, so setSizeAndFrameRateWithPreset() can apply the format back to
// the capture device. Retrieved via downcast<AVVideoPreset>() elsewhere in
// this file.
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // The device format backing this preset; retained for the preset's lifetime.
    RetainPtr<AVCaptureDeviceFormatType> format;
};
152
// Creates a capture source for the AVCaptureDevice with the given unique ID,
// applying the optional constraints. Returns an empty result when the device
// does not exist, or the constraint error when the constraints cannot be met.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *captureDevice = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!captureDevice)
        return { };

    auto captureSource = adoptRef(*new AVVideoCaptureSource(captureDevice, WTFMove(id), WTFMove(hashSalt)));
    if (!constraints)
        return CaptureSourceOrError(WTFMove(captureSource));

    auto constraintError = captureSource->applyConstraints(*constraints);
    if (constraintError)
        return WTFMove(constraintError.value().first);

    return CaptureSourceOrError(WTFMove(captureSource));
}
168
// The device's localized name becomes the source label. The ObjC observer is
// created eagerly, but session KVO/notification registration happens later in
// setupSession()/startProducingData().
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, String&& id, String&& hashSalt)
    : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // InterruptionReason values are stored verbatim from AVFoundation's
    // AVCaptureSessionInterruptionReason (see captureSessionBeginInterruption),
    // so the two enums must stay numerically in lock step.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif
}
181
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().unsetActiveSource(*this);
#endif
    // Clear the observer's back-pointer first so no late delegate/KVO/
    // notification callback can reach this object during teardown.
    [m_objcObserver disconnect];

    if (!m_session)
        return;

    // Balance the KVO registrations added in setupSession() before the session
    // and device are released, then stop the session if it is still running.
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    [m_device removeObserver:m_objcObserver.get() forKeyPath:@"suspended"];
    if ([m_session isRunning])
        [m_session stopRunning];
}
197
// Starts the capture session, creating it lazily on first use. Registers for
// session notifications before starting so no state change is missed.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
211
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    // Any pending interruption is moot once capture is stopped.
    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    // On iOS, drop the session entirely so the camera hardware is released;
    // startProducingData() rebuilds it on demand.
    // NOTE(review): the "running" KVO observation added in setupSession() is
    // not explicitly removed before the session is dropped here — verify this
    // is intentional.
    m_session = nullptr;
#endif
}
227
// Opens an AVCaptureSession configuration transaction, if a session exists.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;

    [m_session beginConfiguration];
}
233
// Commits a configuration transaction opened by beginConfiguration().
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;

    [m_session commitConfiguration];
}
239
// Invalidates the cached settings; settings() recomputes them on next access.
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = std::nullopt;
}
244
// Returns the source's current settings, computing and caching them on first
// access; the cache is invalidated by settingsDidChange().
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings newSettings;
    switch ([device() position]) {
    case AVCaptureDevicePositionFront:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::User);
        break;
    case AVCaptureDevicePositionBack:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Environment);
        break;
    default:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
        break;
    }

    newSettings.setFrameRate(frameRate());
    auto& currentSize = this->size();
    newSettings.setWidth(currentSize.width());
    newSettings.setHeight(currentSize.height());
    newSettings.setDeviceId(hashedId());

    // Facing mode is only a meaningful constraint when the device reports a
    // concrete position.
    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    newSettings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(newSettings);

    return *m_currentSettings;
}
278
// Returns the source's capabilities, computed once and cached for the lifetime
// of the source.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities capabilities(settings().supportedConstraints());
    capabilities.setDeviceId(hashedId());

    AVCaptureDeviceTypedef *videoDevice = device();
    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    // Size/frame-rate ranges are filled in from the supported presets.
    updateCapabilities(capabilities);

    m_capabilities = WTFMove(capabilities);

    return *m_capabilities;
}
299
// On iOS, prefer binned formats (lower power/noise); everywhere else any
// preset is acceptable. The trailing "return true" is only reachable on
// non-iOS platforms.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}
310
// Applies the requested preset (device format) and frame rate to the capture
// device. If the session does not exist yet, the request is stashed and
// replayed by setupCaptureSession(). Fix: both RELEASE_LOG messages previously
// claimed to come from "setFrameRate", which made log triage misleading; they
// now name this function.
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    if (!m_session) {
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    NSError *error = nil;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            // Only switch the active format when it actually changed, avoiding
            // a needless capture-pipeline reconfiguration.
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];
#if PLATFORM(MAC)
                auto settingsDictionary = @{
                    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                    (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                    (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height()),
                    (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ }
                };
                [m_videoOutput setVideoSettings:settingsDictionary];
#endif
            }
            [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
            [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setSizeAndFrameRateWithPreset - exception thrown configuring device: <%s> %s", [[exception name] UTF8String], [[exception reason] UTF8String]);
    }
    [m_session commitConfiguration];

    // NOTE(review): m_currentPreset is updated even when lockForConfiguration
    // failed or an exception was caught — verify it should not be conditional
    // on success.
    m_currentPreset = avPreset;

    if (error)
        RELEASE_LOG(Media, "AVVideoCaptureSource::setSizeAndFrameRateWithPreset - failed to lock video device for configuration: %s", [[error localizedDescription] UTF8String]);
}
365
// Maps an AVCaptureVideoOrientation to the sensor's rotation in degrees.
// The iOS and macOS mappings differ for the portrait orientations because the
// sensor is mounted differently relative to the natural device orientation.
// Fix: the switches cover every declared enum value, but the raw value comes
// from AVFoundation, so the function previously could run off the end of a
// non-void function (undefined behavior, and a -Wreturn-type warning) for an
// unexpected value. A fallback return of 0 with an assertion is added.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    ASSERT_NOT_REACHED();
    return 0;
}
392
// Reads the sensor orientation from the output's video connection; returns 0
// when no video connection is available.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* videoConnection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    if (!videoConnection)
        return 0;

    return sensorOrientation([videoConnection videoOrientation]);
}
398
// Lazily creates the AVCaptureSession, registers KVO for the session's
// "running" state and the device's "suspended" state (balanced in the
// destructor), and configures inputs/outputs inside a configuration
// transaction. Reports captureFailed() on error. Returns true on success.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:(void *)nil];
    [m_device addObserver:m_objcObserver.get() forKeyPath:@"suspended" options:NSKeyValueObservingOptionNew context:(void *)nil];

    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
417
// Finds the supported frame-rate range of the active format that contains the
// requested rate. Among matching ranges, the one with the largest minimum
// frame duration (i.e. the narrowest/lowest-rate range still containing the
// requested rate) wins. Returns nil when no range contains the rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (frameRateRangeIncludesRate({ [frameRateRange minFrameRate], [frameRateRange maxFrameRate] }, rate)) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange)
        RELEASE_LOG(Media, "AVVideoCaptureSource::frameDurationForFrameRate, no frame rate range for rate %g", rate);

    return bestFrameRateRange;
}
433
// Wires the capture pipeline: device input -> session -> video data output
// delivering to the shared capture queue. Called from setupSession() inside a
// begin/commitConfiguration transaction. Replays any size/frame-rate request
// stashed before the session existed, then records the sensor orientation.
// Returns false (and logs) on any failure.
bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return false;
    }
    [session() addInput:videoIn.get()];

    // Frames are delivered in the capture pixel format; late frames are
    // dropped rather than queued.
    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        RELEASE_LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Apply a preset/frame-rate request made before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
474
// Releases the last delivered sample; session/device teardown happens in
// stopProducingData() and the destructor.
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
}
479
// iOS only: subscribes to device-orientation changes and seeds the current
// orientation so sample rotation is correct from the first frame.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
489
// OrientationNotifier callback: records the new device orientation (in
// degrees, one of 0/90/-90/180) and recomputes the per-sample rotation.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
496
// Derives the rotation to apply to each sample from the difference between
// the sensor's mounting orientation and the current device orientation.
// Front-camera frames are mirrored, so the 90-degree cases swap direction.
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        m_sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        m_sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case -270:
        m_sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        // Both inputs are multiples of 90 in [-180, 180], so any other
        // difference indicates a logic error; fall back to no rotation.
        ASSERT_NOT_REACHED();
        m_sampleRotation = MediaSample::VideoRotation::None;
    }
}
520
// Publishes a captured frame to observers, keeping a reference to the latest
// sample and updating the intrinsic size from the sample's presentation size.
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    // Frames arriving while stopped or muted are dropped.
    if (!isProducingData())
        return;
    if (muted())
        return;

    m_buffer = &sample.get();
    setIntrinsicSize(expandedIntSize(sample->presentationSize()));
    dispatchMediaSampleToObservers(WTFMove(sample));
}
530
// Sample-buffer delegate entry point; runs on the capture queue. Skips the
// first few frames after (re)start, then wraps the buffer and hops to the
// main task queue for delivery.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    // Short-circuit: decrements only while the counter is nonzero, dropping
    // exactly m_framesToDropAtStartup frames after the session starts running.
    if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
        return;

    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
541
// KVO handler for the session's "running" key. Mirrors the running state into
// the source's muted state and arms the startup frame-drop counter whenever
// the session (re)starts.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        // Nothing to do when the running state is unchanged and the muted
        // state already agrees with it.
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        if (m_isRunning)
            m_framesToDropAtStartup = 4;

        notifyMutedChange(!m_isRunning);
    });
}
555
// KVO handler for the device's "suspended" key (e.g. a clamshell Mac being
// closed). Compiled out on iOS, where interruption notifications cover this.
void AVVideoCaptureSource::captureDeviceSuspendedDidChange()
{
#if !PLATFORM(IOS_FAMILY)
    scheduleDeferredTask([this] {
        auto isSuspended = [m_device isSuspended];
        if (isSuspended == muted())
            return;

        notifyMutedChange(isSuspended);
    });
#endif
}
568
// A source is interrupted when AVFoundation has reported a session
// interruption, or when the base class considers it interrupted.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
576
// Builds one preset per unique frame size supported by the device. When
// several AVCaptureDeviceFormats share the same dimensions, only the first
// encountered format is kept.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presets;
    for (AVCaptureDeviceFormatType* format in [device() formats]) {

        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        IntSize size = { dimensions.width, dimensions.height };
        auto index = presets.findMatching([&size](auto& preset) {
            return size == preset->size;
        });
        if (index != notFound)
            continue;

        Vector<FrameRateRange> frameRates;
        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges])
            frameRates.append({ range.minFrameRate, range.maxFrameRate});

        presets.append(AVVideoPreset::create(size, WTFMove(frameRates), format));
    }

    setSupportedPresets(WTFMove(presets));
}
599
600 #if PLATFORM(IOS_FAMILY)
// Handles AVCaptureSessionRuntimeErrorNotification. Only a media-services
// reset on a running session is recoverable; a single restart is attempted.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
610
// Records the interruption reason straight from the notification's user info;
// the static_asserts in the constructor guarantee the numeric values of
// InterruptionReason match AVCaptureSessionInterruptionReason.
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
615
// Clears the recorded interruption. Only a side-by-side multitasking
// interruption warrants restarting a stopped session automatically; other
// reasons either resolved themselves or require user action.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
627 #endif
628
629 } // namespace WebCore
630
@implementation WebCoreAVVideoCaptureSourceObserver

// Stores a raw back-pointer to the owning source; cleared by -disconnect.
- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;

    return self;
}

// Severs the link to the source before it is destroyed so no queued callback
// can dereference a dead pointer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}

// iOS only: subscribes to the session's runtime-error and interruption
// notifications; balanced by -removeNotificationObservers.
- (void)addNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    AVCaptureSessionType* session = m_callback->session();

    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}

- (void)removeNotificationObservers
{
#if PLATFORM(IOS_FAMILY)
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#endif
}

// Sample-buffer delegate callback; runs on the capture serial queue and
// forwards to the C++ source when still connected.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (!m_callback)
        return;

    m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}

// KVO callback for the session's "running" and the device's "suspended" keys.
// NOTE(review): keyPath is declared without a type (implicitly id) although it
// is used as an NSString below; the standard NSObject signature types it as
// NSString* — verify the mismatch is intentional.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    // "Prior" notifications fire before the change; only act on the post-change
    // notification below.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
#if !LOG_DISABLED

    if (willChange)
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVVideoCaptureSourceObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    if (!willChange && [keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
    if (!willChange && [keyPath isEqualToString:@"suspended"])
        m_callback->captureDeviceSuspendedDidChange();
}

#if PLATFORM(IOS_FAMILY)
// Forwards session runtime errors (extracted from the notification's user
// info) to the source.
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::sessionRuntimeError(%p) - error = %s", self, [[error localizedDescription] UTF8String]);

    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}

- (void)beginSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::beginSessionInterrupted(%p) - reason = %d", self, [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);

    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}

- (void)endSessionInterrupted:(NSNotification*)notification
{
    LOG(Media, "WebCoreAVVideoCaptureSourceObserver::endSessionInterrupted(%p)", self);

    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
#endif

@end
735
736 #endif // ENABLE(MEDIA_STREAM)