Add media stream release logging
[WebKit-https.git] / Source / WebCore / platform / mediastream / mac / AVVideoCaptureSource.mm
1 /*
2  * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "AVVideoCaptureSource.h"
28
29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
30
31 #import "ImageBuffer.h"
32 #import "ImageTransferSessionVT.h"
33 #import "IntRect.h"
34 #import "Logging.h"
35 #import "MediaConstraints.h"
36 #import "MediaSampleAVFObjC.h"
37 #import "PlatformLayer.h"
38 #import "RealtimeMediaSourceCenter.h"
39 #import "RealtimeMediaSourceSettings.h"
40 #import "RealtimeVideoUtilities.h"
41 #import <AVFoundation/AVCaptureDevice.h>
42 #import <AVFoundation/AVCaptureInput.h>
43 #import <AVFoundation/AVCaptureOutput.h>
44 #import <AVFoundation/AVCaptureSession.h>
45 #import <AVFoundation/AVError.h>
46 #import <objc/runtime.h>
47
48 #import <pal/cf/CoreMediaSoftLink.h>
49 #import "CoreVideoSoftLink.h"
50
51 typedef AVCaptureConnection AVCaptureConnectionType;
52 typedef AVCaptureDevice AVCaptureDeviceTypedef;
53 typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
54 typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
55 typedef AVCaptureOutput AVCaptureOutputType;
56 typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
57 typedef AVFrameRateRange AVFrameRateRangeType;
58 typedef AVCaptureSession AVCaptureSessionType;
59
60 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
61
62 SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
63 SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
64 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
65 SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
66 SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
67 SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
68 SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)
69 SOFT_LINK_CLASS(AVFoundation, AVCaptureSession)
70
71 #define AVCaptureConnection getAVCaptureConnectionClass()
72 #define AVCaptureDevice getAVCaptureDeviceClass()
73 #define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
74 #define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
75 #define AVCaptureOutput getAVCaptureOutputClass()
76 #define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
77 #define AVFrameRateRange getAVFrameRateRangeClass()
78
79 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
80
81 SOFT_LINK_CONSTANT(AVFoundation, AVCaptureDeviceWasDisconnectedNotification, NSString *)
82 #define AVCaptureDeviceWasDisconnectedNotification getAVCaptureDeviceWasDisconnectedNotification()
83
84 #if PLATFORM(IOS_FAMILY)
85 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionRuntimeErrorNotification, NSString *)
86 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionWasInterruptedNotification, NSString *)
87 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionEndedNotification, NSString *)
88 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionInterruptionReasonKey, NSString *)
89 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVCaptureSessionErrorKey, NSString *)
90
91 #define AVCaptureSessionRuntimeErrorNotification getAVCaptureSessionRuntimeErrorNotification()
92 #define AVCaptureSessionWasInterruptedNotification getAVCaptureSessionWasInterruptedNotification()
93 #define AVCaptureSessionInterruptionEndedNotification getAVCaptureSessionInterruptionEndedNotification()
94 #define AVCaptureSessionInterruptionReasonKey getAVCaptureSessionInterruptionReasonKey()
95 #define AVCaptureSessionErrorKey getAVCaptureSessionErrorKey()
96 #endif
97
98 using namespace WebCore;
99 using namespace PAL;
100
// Objective-C adapter that receives AVFoundation sample-buffer, KVO, and
// notification callbacks and forwards them to the owning AVVideoCaptureSource.
// m_callback is a raw pointer cleared via -disconnect before the source dies.
@interface WebCoreAVVideoCaptureSourceObserver : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVVideoCaptureSource* m_callback;
}

-(id)initWithCallback:(AVVideoCaptureSource*)callback;
-(void)disconnect;
-(void)addNotificationObservers;
-(void)removeNotificationObservers;
-(void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context;
#if PLATFORM(IOS_FAMILY)
-(void)sessionRuntimeError:(NSNotification*)notification;
-(void)beginSessionInterrupted:(NSNotification*)notification;
-(void)endSessionInterrupted:(NSNotification*)notification;
-(void)deviceConnectedDidChange:(NSNotification*)notification;
#endif
@end
118
119 namespace WebCore {
120
// Pixel format requested from AVFoundation for captured frames.
static inline OSType avVideoCapturePixelBufferFormat()
{
    // FIXME: Use preferedPixelBufferFormat() once rdar://problem/44391444 is fixed.
#if !PLATFORM(MAC)
    return preferedPixelBufferFormat();
#else
    return kCVPixelFormatType_420YpCbCr8Planar;
#endif
}
130
// Returns the shared serial queue on which video sample buffers are delivered.
// NOTE(review): the "globa" typo in the name is preserved intentionally;
// renaming would require updating callers.
static dispatch_queue_t globaVideoCaptureSerialQueue()
{
    static dispatch_queue_t captureQueue;
    static dispatch_once_t initializeQueueOnce;
    dispatch_once(&initializeQueueOnce, ^{
        captureQueue = dispatch_queue_create_with_target("WebCoreAVVideoCaptureSource video capture queue", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    });
    return captureQueue;
}
140
// VideoPreset subclass that pairs a capture size and its frame-rate ranges
// with the AVCaptureDeviceFormat that produces them, so the format can be
// re-applied later in setSizeAndFrameRateWithPreset().
class AVVideoPreset : public VideoPreset {
public:
    static Ref<AVVideoPreset> create(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
    {
        return adoptRef(*new AVVideoPreset(size, WTFMove(frameRateRanges), format));
    }

    AVVideoPreset(IntSize size, Vector<FrameRateRange>&& frameRateRanges, AVCaptureDeviceFormatType* format)
        : VideoPreset(size, WTFMove(frameRateRanges), AVCapture)
        , format(format)
    {
    }

    // Retained so the format stays valid independently of the device's format list.
    RetainPtr<AVCaptureDeviceFormatType> format;
};
156
// Creates a capture source for the device with the given unique ID, applying
// any initial constraints. Returns an empty result when the device is unknown,
// or the bad constraint name when constraint application fails.
CaptureSourceOrError AVVideoCaptureSource::create(String&& id, String&& hashSalt, const MediaConstraints* constraints)
{
    AVCaptureDeviceTypedef *captureDevice = [getAVCaptureDeviceClass() deviceWithUniqueID:id];
    if (!captureDevice)
        return { };

    auto source = adoptRef(*new AVVideoCaptureSource(captureDevice, WTFMove(id), WTFMove(hashSalt)));
    if (constraints) {
        if (auto result = source->applyConstraints(*constraints))
            return WTFMove(result.value().badConstraint);
    }

    return CaptureSourceOrError(WTFMove(source));
}
172
AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, String&& id, String&& hashSalt)
    : RealtimeVideoSource(device.localizedName, WTFMove(id), WTFMove(hashSalt))
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] initWithCallback:this]))
    , m_device(device)
{
#if PLATFORM(IOS_FAMILY)
    // InterruptionReason values are cast directly from AVCaptureSessionInterruptionReason
    // in captureSessionBeginInterruption(); verify the mapping at compile time.
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInBackground) == static_cast<int>(AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground), "InterruptionReason::VideoNotAllowedInBackground is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoNotAllowedInSideBySide) == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps, "InterruptionReason::VideoNotAllowedInSideBySide is not AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps as expected");
    static_assert(static_cast<int>(InterruptionReason::VideoInUse) == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient, "InterruptionReason::VideoInUse is not AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient as expected");
    static_assert(static_cast<int>(InterruptionReason::AudioInUse) == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient, "InterruptionReason::AudioInUse is not AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient as expected");
#endif

    // Observe device suspension changes; handled in captureDeviceSuspendedDidChange().
    // The matching removeObserver is in the destructor.
    [m_device.get() addObserver:m_objcObserver.get() forKeyPath:@"suspended" options:NSKeyValueObservingOptionNew context:(void *)nil];
}
187
AVVideoCaptureSource::~AVVideoCaptureSource()
{
#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().unsetActiveSource(*this);
#endif
    // Detach the Objective-C observer first so no callback can reach this
    // object while the session is being torn down.
    [m_objcObserver disconnect];
    [m_device removeObserver:m_objcObserver.get() forKeyPath:@"suspended"];

    if (!m_session)
        return;

    if ([m_session isRunning])
        [m_session stopRunning];

    // Unregisters the "running" KVO observer and drops the session.
    clearSession();
}
204
// Drops the capture session after unregistering the "running" KVO observer
// that setupSession() added. Must only be called while a session exists.
void AVVideoCaptureSource::clearSession()
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    ASSERT(m_session);
    [m_session removeObserver:m_objcObserver.get() forKeyPath:@"running"];
    m_session = nullptr;
}
212
// Starts the capture session, lazily creating it on first use. A session that
// is already running is left alone.
void AVVideoCaptureSource::startProducingData()
{
    if (!m_session && !setupSession())
        return;

    if ([m_session isRunning])
        return;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    [m_objcObserver addNotificationObservers];
    [m_session startRunning];
}
227
void AVVideoCaptureSource::stopProducingData()
{
    if (!m_session)
        return;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    [m_objcObserver removeNotificationObservers];

    if ([m_session isRunning])
        [m_session stopRunning];

    m_interruption = InterruptionReason::None;
#if PLATFORM(IOS_FAMILY)
    // On iOS, tear the session down completely; startProducingData() rebuilds
    // it via setupSession() on the next start.
    clearSession();
#endif
}
244
// Opens an AVCaptureSession configuration transaction; no-op without a session.
void AVVideoCaptureSource::beginConfiguration()
{
    if (!m_session)
        return;
    [m_session beginConfiguration];
}
250
// Closes the configuration transaction opened by beginConfiguration().
void AVVideoCaptureSource::commitConfiguration()
{
    if (!m_session)
        return;
    [m_session commitConfiguration];
}
256
// Invalidates the cached settings; settings() rebuilds them on next access.
void AVVideoCaptureSource::settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>)
{
    m_currentSettings = WTF::nullopt;
}
261
// Returns the current settings, rebuilding the cache if settingsDidChange()
// invalidated it. Width/height are reported as the page sees them: a 90°
// sample rotation swaps the axes.
const RealtimeMediaSourceSettings& AVVideoCaptureSource::settings()
{
    if (m_currentSettings)
        return *m_currentSettings;

    RealtimeMediaSourceSettings newSettings;

    auto devicePosition = [device() position];
    switch (devicePosition) {
    case AVCaptureDevicePositionFront:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::User);
        break;
    case AVCaptureDevicePositionBack:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Environment);
        break;
    default:
        newSettings.setFacingMode(RealtimeMediaSourceSettings::Unknown);
        break;
    }

    newSettings.setFrameRate(frameRate());

    auto reportedSize = this->size();
    if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
        reportedSize = reportedSize.transposedSize();

    newSettings.setWidth(reportedSize.width());
    newSettings.setHeight(reportedSize.height());
    newSettings.setDeviceId(hashedId());

    RealtimeMediaSourceSupportedConstraints supportedConstraints;
    supportedConstraints.setSupportsDeviceId(true);
    supportedConstraints.setSupportsFacingMode(devicePosition != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);

    newSettings.setSupportedConstraints(supportedConstraints);

    m_currentSettings = WTFMove(newSettings);

    return *m_currentSettings;
}
299
// Returns the (lazily computed and cached) capabilities for this camera.
const RealtimeMediaSourceCapabilities& AVVideoCaptureSource::capabilities()
{
    if (m_capabilities)
        return *m_capabilities;

    RealtimeMediaSourceCapabilities newCapabilities(settings().supportedConstraints());
    newCapabilities.setDeviceId(hashedId());

    auto devicePosition = [device() position];
    if (devicePosition == AVCaptureDevicePositionFront)
        newCapabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if (devicePosition == AVCaptureDevicePositionBack)
        newCapabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    // Base class fills in width/height/frame-rate ranges from the presets.
    updateCapabilities(newCapabilities);

    m_capabilities = WTFMove(newCapabilities);

    return *m_capabilities;
}
320
// On iOS, prefer binned formats; elsewhere every preset is acceptable.
// Note the trailing "return true" is only reachable on non-iOS builds.
bool AVVideoCaptureSource::prefersPreset(VideoPreset& preset)
{
#if PLATFORM(IOS_FAMILY)
    return [static_cast<AVVideoPreset*>(&preset)->format.get() isVideoBinned];
#else
    UNUSED_PARAM(preset);
#endif

    return true;
}
331
// Applies the requested preset (device format) and frame rate to the capture
// device. If no session exists yet, the request is remembered and replayed by
// setupCaptureSession(). All device/session configuration is bracketed by
// lockForConfiguration/unlockForConfiguration and begin/commitConfiguration.
void AVVideoCaptureSource::setSizeAndFrameRateWithPreset(IntSize requestedSize, double requestedFrameRate, RefPtr<VideoPreset> preset)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, SizeAndFrameRate { requestedSize.width(), requestedSize.height(), requestedFrameRate });

    auto* avPreset = preset ? downcast<AVVideoPreset>(preset.get()) : nullptr;

    if (!m_session) {
        m_pendingPreset = avPreset;
        m_pendingSize = requestedSize;
        m_pendingFrameRate = requestedFrameRate;
        return;
    }

    m_pendingPreset = nullptr;
    m_pendingFrameRate = 0;

    auto* frameRateRange = frameDurationForFrameRate(requestedFrameRate);
    ASSERT(frameRateRange);
    if (!frameRateRange)
        return;

    if (!avPreset)
        return;

    ASSERT(avPreset->format);

    NSError *error = nil;
    bool succeeded = true;
    [m_session beginConfiguration];
    @try {
        if ([device() lockForConfiguration:&error]) {
            if (!m_currentPreset || ![m_currentPreset->format.get() isEqual:avPreset->format.get()]) {
                [device() setActiveFormat:avPreset->format.get()];

                // Supported frame-rate ranges depend on the active format, so re-query.
                frameRateRange = frameDurationForFrameRate(requestedFrameRate);
                ASSERT(frameRateRange);
                if (!frameRateRange)
                    succeeded = false;

#if PLATFORM(MAC)
                if (succeeded) {
                    auto settingsDictionary = @{
                        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(avVideoCapturePixelBufferFormat()),
                        (__bridge NSString *)kCVPixelBufferWidthKey: @(avPreset->size.width()),
                        (__bridge NSString *)kCVPixelBufferHeightKey: @(avPreset->size.height()),
                        (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ }
                    };
                    [m_videoOutput setVideoSettings:settingsDictionary];
                }
#endif
            }

            if (succeeded) {
                ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, "setting frame rate ", CMTimeGetSeconds([frameRateRange minFrameDuration]), " .. ", CMTimeGetSeconds([frameRateRange maxFrameDuration]));
                [device() setActiveVideoMinFrameDuration:[frameRateRange minFrameDuration]];
                [device() setActiveVideoMaxFrameDuration:[frameRateRange maxFrameDuration]];
            }
            // BUG FIX: the original returned from inside the lock when the
            // re-queried frame-rate range was null, leaking the configuration lock.
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "error configuring device ", [[exception name] UTF8String], ", reason : ", [[exception reason] UTF8String]);
        succeeded = false;
    }
    // BUG FIX: beginConfiguration was left unbalanced when configuration threw
    // or the frame-rate range lookup failed; commit on every path.
    [m_session commitConfiguration];

    if (!succeeded)
        return;

    m_currentPreset = avPreset;

    ERROR_LOG_IF(error && loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);
}
396
// Maps an AVCaptureVideoOrientation to the sensor rotation in degrees.
// The iOS and macOS mappings differ for the portrait orientations.
static inline int sensorOrientation(AVCaptureVideoOrientation videoOrientation)
{
#if PLATFORM(IOS_FAMILY)
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 180;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 0;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#else
    switch (videoOrientation) {
    case AVCaptureVideoOrientationPortrait:
        return 0;
    case AVCaptureVideoOrientationPortraitUpsideDown:
        return 180;
    case AVCaptureVideoOrientationLandscapeRight:
        return 90;
    case AVCaptureVideoOrientationLandscapeLeft:
        return -90;
    }
#endif
    // BUG FIX: control fell off the end of this non-void function for any
    // orientation value outside the enumerated cases (undefined behavior).
    ASSERT_NOT_REACHED();
    return 0;
}
423
// Reads the sensor orientation from the output's video connection; 0 when
// there is no video connection.
static inline int sensorOrientationFromVideoOutput(AVCaptureVideoDataOutputType* videoOutput)
{
    AVCaptureConnectionType* videoConnection = [videoOutput connectionWithMediaType: getAVMediaTypeVideo()];
    if (!videoConnection)
        return 0;
    return sensorOrientation([videoConnection videoOrientation]);
}
429
// Creates the AVCaptureSession, registers for "running" KVO (removed again in
// clearSession()), and configures inputs/outputs in one transaction.
// Returns false — after notifying captureFailed() — when setup fails.
bool AVVideoCaptureSource::setupSession()
{
    if (m_session)
        return true;

    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

    m_session = adoptNS([allocAVCaptureSessionInstance() init]);
    [m_session addObserver:m_objcObserver.get() forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:(void *)nil];

    // Batch the input/output changes into a single configuration transaction.
    [m_session beginConfiguration];
    bool success = setupCaptureSession();
    [m_session commitConfiguration];

    if (!success)
        captureFailed();

    return success;
}
449
// Picks, among the active format's supported frame-rate ranges containing
// `rate`, the one with the largest minimum frame duration. Returns nil (after
// logging) when no range contains the rate.
AVFrameRateRangeType* AVVideoCaptureSource::frameDurationForFrameRate(double rate)
{
    AVFrameRateRangeType *bestMatch = nil;
    for (AVFrameRateRangeType *candidate in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (!frameRateRangeIncludesRate({ [candidate minFrameRate], [candidate maxFrameRate] }, rate))
            continue;
        if (!bestMatch || CMTIME_COMPARE_INLINE([candidate minFrameDuration], >, [bestMatch minFrameDuration]))
            bestMatch = candidate;
    }

    if (!bestMatch)
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "no frame rate range for rate ", rate);

    return bestMatch;
}
465
// Attaches the device input and a video-data output (with our preferred pixel
// format) to the session. The caller (setupSession()) wraps this in a
// begin/commitConfiguration pair. Returns false on any AVFoundation failure.
bool AVVideoCaptureSource::setupCaptureSession()
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

#if PLATFORM(IOS_FAMILY)
    RealtimeMediaSourceCenter::singleton().videoCaptureFactory().setActiveSource(*this);
#endif

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "failed to allocate AVCaptureDeviceInput ", [[error localizedDescription] UTF8String]);
        return false;
    }

    if (![session() canAddInput:videoIn.get()]) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video input device");
        return false;
    }
    [session() addInput:videoIn.get()];

    m_videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    auto settingsDictionary = adoptNS([[NSMutableDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:avVideoCapturePixelBufferFormat()], kCVPixelBufferPixelFormatTypeKey, nil]);

    [m_videoOutput setVideoSettings:settingsDictionary.get()];
    // Drop frames rather than queue them when the delegate falls behind.
    [m_videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [m_videoOutput setSampleBufferDelegate:m_objcObserver.get() queue:globaVideoCaptureSerialQueue()];

    if (![session() canAddOutput:m_videoOutput.get()]) {
        ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "unable to add video output device");
        return false;
    }
    [session() addOutput:m_videoOutput.get()];

    // Replay any size/frame-rate request that arrived before the session existed.
    if (m_pendingPreset || m_pendingFrameRate)
        setSizeAndFrameRateWithPreset(m_pendingSize, m_pendingFrameRate, m_pendingPreset);

    m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
    computeSampleRotation();

    return true;
}
508
// Releases the last retained sample; the session itself is torn down in
// stopProducingData()/clearSession().
void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
}
513
// Registers for device-orientation updates (iOS only) and seeds the current
// orientation; no-op on other platforms.
void AVVideoCaptureSource::monitorOrientation(OrientationNotifier& notifier)
{
#if PLATFORM(IOS_FAMILY)
    notifier.addObserver(*this);
    orientationChanged(notifier.orientation());
#else
    UNUSED_PARAM(notifier);
#endif
}
523
// Orientation-notifier callback; `orientation` is in degrees and feeds into
// computeSampleRotation() together with the sensor orientation.
void AVVideoCaptureSource::orientationChanged(int orientation)
{
    ASSERT(orientation == 0 || orientation == 90 || orientation == -90 || orientation == 180);
    m_deviceOrientation = orientation;
    computeSampleRotation();
}
530
// Derives the rotation to apply to each sample from the difference between the
// sensor's mounting orientation and the current device orientation. Front
// cameras rotate the opposite way for the 90° cases (mirrored image).
void AVVideoCaptureSource::computeSampleRotation()
{
    bool frontCamera = [device() position] == AVCaptureDevicePositionFront;
    MediaSample::VideoRotation sampleRotation;
    switch (m_sensorOrientation - m_deviceOrientation) {
    case 0:
        sampleRotation = MediaSample::VideoRotation::None;
        break;
    case 180:
    case -180:
        sampleRotation = MediaSample::VideoRotation::UpsideDown;
        break;
    case 90:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Left : MediaSample::VideoRotation::Right;
        break;
    case -90:
    case -270:
        sampleRotation = frontCamera ? MediaSample::VideoRotation::Right : MediaSample::VideoRotation::Left;
        break;
    default:
        ASSERT_NOT_REACHED();
        sampleRotation = MediaSample::VideoRotation::None;
    }
    if (sampleRotation == m_sampleRotation)
        return;

    m_sampleRotation = sampleRotation;
    // A rotation change swaps the reported width/height; see settings().
    notifySettingsDidChangeObservers({ RealtimeMediaSourceSettings::Flag::Width, RealtimeMediaSourceSettings::Flag::Height });
}
560
// Delivers one captured frame to observers; frames are discarded while the
// source is stopped or muted. Runs on the main thread (via scheduleDeferredTask).
void AVVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
{
    if (!isProducingData())
        return;
    if (muted())
        return;

    m_buffer = &sample.get();
    setIntrinsicSize(expandedIntSize(sample->presentationSize()));
    dispatchMediaSampleToObservers(WTFMove(sample));
}
570
// Called on the capture queue for every frame; wraps the buffer in a
// MediaSample and hops to the main thread before touching source state.
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType* captureConnection)
{
    // Skip the first few frames after the session starts running (counter is
    // reset in captureSessionIsRunningDidChange()); the leading && keeps the
    // counter from being decremented below zero.
    if (m_framesToDropAtStartup && m_framesToDropAtStartup--)
        return;

    auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
    scheduleDeferredTask([this, sample = WTFMove(sample)] () mutable {
        processNewFrame(WTFMove(sample));
    });
}
581
// KVO handler for the session's "running" key; mirrors the running state into
// the source's muted state on the main thread.
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
{
    scheduleDeferredTask([this, state] {
        ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, state);
        if ((state == m_isRunning) && (state == !muted()))
            return;

        m_isRunning = state;
        if (m_isRunning)
            m_framesToDropAtStartup = 4;

        notifyMutedChange(!m_isRunning);
    });
}
596
// KVO handler for the device's "suspended" key; maps device suspension onto
// the muted state. Compiled out on iOS, which uses session interruptions
// (captureSessionBeginInterruption/EndInterruption) instead.
void AVVideoCaptureSource::captureDeviceSuspendedDidChange()
{
#if !PLATFORM(IOS_FAMILY)
    scheduleDeferredTask([this] {
        auto isSuspended = [m_device isSuspended];
        ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, !!isSuspended);
        if (isSuspended == muted())
            return;

        notifyMutedChange(isSuspended);
    });
#endif
}
610
// True when an AVFoundation interruption is active or the base class reports
// an interruption of its own.
bool AVVideoCaptureSource::interrupted() const
{
    return m_interruption != InterruptionReason::None || RealtimeMediaSource::interrupted();
}
618
// Builds one preset per unique capture dimension, keeping the first device
// format seen at each size along with all of its supported frame-rate ranges.
void AVVideoCaptureSource::generatePresets()
{
    Vector<Ref<VideoPreset>> presetList;
    for (AVCaptureDeviceFormatType* deviceFormat in [device() formats]) {

        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
        IntSize formatSize = { dimensions.width, dimensions.height };
        auto existingIndex = presetList.findMatching([&formatSize](auto& preset) {
            return formatSize == preset->size;
        });
        if (existingIndex != notFound)
            continue;

        Vector<FrameRateRange> supportedRates;
        for (AVFrameRateRangeType *range in [deviceFormat videoSupportedFrameRateRanges])
            supportedRates.append({ range.minFrameRate, range.maxFrameRate});

        presetList.append(AVVideoPreset::create(formatSize, WTFMove(supportedRates), deviceFormat));
    }

    setSupportedPresets(WTFMove(presetList));
}
641
642 #if PLATFORM(IOS_FAMILY)
// AVCaptureSessionRuntimeErrorNotification handler. When media services were
// reset while we were running, attempt a single session restart.
void AVVideoCaptureSource::captureSessionRuntimeError(RetainPtr<NSError> error)
{
    ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, [[error localizedDescription] UTF8String]);

    if (!m_isRunning || error.get().code != AVErrorMediaServicesWereReset)
        return;

    // Try to restart the session, but reset m_isRunning immediately so if it fails we won't try again.
    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
654
// AVCaptureSessionWasInterruptedNotification handler; records the reason so
// interrupted() reports true. The direct cast from the AVFoundation reason is
// validated by the static_asserts in the constructor.
void AVVideoCaptureSource::captureSessionBeginInterruption(RetainPtr<NSNotification> notification)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER, [notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
    m_interruption = static_cast<AVVideoCaptureSource::InterruptionReason>([notification.get().userInfo[AVCaptureSessionInterruptionReasonKey] integerValue]);
}
660
// AVCaptureSessionInterruptionEndedNotification handler. Clears the recorded
// reason; the session is restarted here only for the side-by-side case.
void AVVideoCaptureSource::captureSessionEndInterruption(RetainPtr<NSNotification>)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);

    InterruptionReason reason = m_interruption;

    m_interruption = InterruptionReason::None;
    if (reason != InterruptionReason::VideoNotAllowedInSideBySide || m_isRunning || !m_session)
        return;

    [m_session startRunning];
    m_isRunning = [m_session isRunning];
}
674 #endif
675
// AVCaptureDeviceWasDisconnectedNotification handler; fails the capture when
// the disconnected device is the one backing this source.
void AVVideoCaptureSource::deviceDisconnected(RetainPtr<NSNotification> notification)
{
    ALWAYS_LOG_IF(loggerPtr(), LOGIDENTIFIER);
    if ([notification object] != this->device())
        return;
    captureFailed();
}
682
683
684 } // namespace WebCore
685
// Forwarding adapter: every delegate/KVO/notification callback checks
// m_callback (cleared by -disconnect) before reaching the C++ source.
@implementation WebCoreAVVideoCaptureSourceObserver

- (id)initWithCallback:(AVVideoCaptureSource*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;

    return self;
}

// Called from ~AVVideoCaptureSource(); after this no callback is forwarded.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self removeNotificationObservers];
    m_callback = nullptr;
}

- (void)addNotificationObservers
{
    ASSERT(m_callback);

    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];

    [center addObserver:self selector:@selector(deviceConnectedDidChange:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];

#if PLATFORM(IOS_FAMILY)
    // Session runtime-error and interruption notifications are observed on iOS only.
    AVCaptureSessionType* session = m_callback->session();
    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:session];
    [center addObserver:self selector:@selector(beginSessionInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:session];
    [center addObserver:self selector:@selector(endSessionInterrupted:) name:AVCaptureSessionInterruptionEndedNotification object:session];
#endif
}

- (void)removeNotificationObservers
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// AVCaptureVideoDataOutputSampleBufferDelegate; runs on the capture queue.
- (void)captureOutput:(AVCaptureOutputType*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnectionType*)connection
{
    if (!m_callback)
        return;

    m_callback->captureOutputDidOutputSampleBufferFromConnection(captureOutput, sampleBuffer, connection);
}

// KVO handler for the session's "running" and the device's "suspended" keys.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    UNUSED_PARAM(object);
    UNUSED_PARAM(context);

    if (!m_callback)
        return;

    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    // "Prior" notifications are ignored; only react once the value changed.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !RELEASE_LOG_DISABLED
    if (m_callback->loggerPtr() && m_callback->logger().willLog(m_callback->logChannel(), WTFLogLevelDebug)) {
        auto identifier = Logger::LogSiteIdentifier("AVVideoCaptureSource", "observeValueForKeyPath", m_callback->logIdentifier());

        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
    }
#endif

    if (!willChange && [keyPath isEqualToString:@"running"])
        m_callback->captureSessionIsRunningDidChange([newValue boolValue]);
    if (!willChange && [keyPath isEqualToString:@"suspended"])
        m_callback->captureDeviceSuspendedDidChange();
}

// NOTE(review): despite the "connected did change" name, this selector is
// registered only for the *disconnected* notification.
- (void)deviceConnectedDidChange:(NSNotification*)notification
{
    if (m_callback)
        m_callback->deviceDisconnected(notification);
}

#if PLATFORM(IOS_FAMILY)
- (void)sessionRuntimeError:(NSNotification*)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    if (m_callback)
        m_callback->captureSessionRuntimeError(error);
}

- (void)beginSessionInterrupted:(NSNotification*)notification
{
    if (m_callback)
        m_callback->captureSessionBeginInterruption(notification);
}

- (void)endSessionInterrupted:(NSNotification*)notification
{
    if (m_callback)
        m_callback->captureSessionEndInterruption(notification);
}
#endif

@end
790
791 #endif // ENABLE(MEDIA_STREAM)