[MediaStream] Sync video preview layer and parent layer sizes
Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm
/*
 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "AVVideoCaptureSource.h"

#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)

#import "AVCaptureDeviceManager.h"
#import "GraphicsContextCG.h"
#import "ImageBuffer.h"
#import "IntRect.h"
#import "Logging.h"
#import "MediaConstraints.h"
#import "MediaSampleAVFObjC.h"
#import "NotImplemented.h"
#import "PlatformLayer.h"
#import "RealtimeMediaSourceCenter.h"
#import "RealtimeMediaSourcePreview.h"
#import "RealtimeMediaSourceSettings.h"
#import "WebActionDisablingCALayerDelegate.h"
#import <AVFoundation/AVFoundation.h>
#import <objc/runtime.h>

#if PLATFORM(IOS)
#include "WebCoreThread.h"
#include "WebCoreThreadRun.h"
#endif

#import "CoreMediaSoftLink.h"
#import "CoreVideoSoftLink.h"

typedef AVCaptureConnection AVCaptureConnectionType;
typedef AVCaptureDevice AVCaptureDeviceTypedef;
typedef AVCaptureDeviceFormat AVCaptureDeviceFormatType;
typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
typedef AVCaptureOutput AVCaptureOutputType;
typedef AVCaptureVideoDataOutput AVCaptureVideoDataOutputType;
typedef AVFrameRateRange AVFrameRateRangeType;
typedef AVCaptureVideoPreviewLayer AVCaptureVideoPreviewLayerType;

SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)

SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceFormat)
SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoDataOutput)
SOFT_LINK_CLASS(AVFoundation, AVCaptureVideoPreviewLayer)
SOFT_LINK_CLASS(AVFoundation, AVFrameRateRange)

#define AVCaptureConnection getAVCaptureConnectionClass()
#define AVCaptureDevice getAVCaptureDeviceClass()
#define AVCaptureDeviceFormat getAVCaptureDeviceFormatClass()
#define AVCaptureDeviceInput getAVCaptureDeviceInputClass()
#define AVCaptureOutput getAVCaptureOutputClass()
#define AVCaptureVideoDataOutput getAVCaptureVideoDataOutputClass()
#define AVCaptureVideoPreviewLayer getAVCaptureVideoPreviewLayerClass()
#define AVFrameRateRange getAVFrameRateRangeClass()

SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset1280x720, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset960x540, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset640x480, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset352x288, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPreset320x240, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVCaptureSessionPresetLow, NSString *)

#define AVCaptureSessionPreset1280x720 getAVCaptureSessionPreset1280x720()
#define AVCaptureSessionPreset960x540 getAVCaptureSessionPreset960x540()
#define AVCaptureSessionPreset640x480 getAVCaptureSessionPreset640x480()
#define AVCaptureSessionPreset352x288 getAVCaptureSessionPreset352x288()
#define AVCaptureSessionPreset320x240 getAVCaptureSessionPreset320x240()
#define AVCaptureSessionPresetLow getAVCaptureSessionPresetLow()

using namespace WebCore;

// Forward-declare the C++ preview class so the Objective-C observer interface can reference it.
namespace WebCore {
class AVVideoSourcePreview;
}

@interface WebCoreAVVideoCaptureSourceObserver : NSObject<CALayerDelegate> {
    AVVideoSourcePreview *_parent;
    BOOL _hasObserver;
}

- (void)setParent:(AVVideoSourcePreview *)parent;
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context;
@end

namespace WebCore {

class AVVideoSourcePreview: public AVMediaSourcePreview {
public:
    static RefPtr<AVMediaSourcePreview> create(AVCaptureSession*, AVCaptureDeviceTypedef*, AVVideoCaptureSource*);

    void backgroundLayerBoundsChanged();
    PlatformLayer* platformLayer() const final { return m_previewBackgroundLayer.get(); }

private:
    AVVideoSourcePreview(AVCaptureSession*, AVCaptureDeviceTypedef*, AVVideoCaptureSource*);

    void invalidate() final;

    void play() const final;
    void pause() const final;
    void setVolume(double) const final { };
    void setEnabled(bool) final;
    void setPaused(bool) const;

    RetainPtr<AVCaptureVideoPreviewLayerType> m_previewLayer;
    RetainPtr<PlatformLayer> m_previewBackgroundLayer;
    RetainPtr<AVCaptureDeviceTypedef> m_device;
    RetainPtr<WebCoreAVVideoCaptureSourceObserver> m_objcObserver;
};

RefPtr<AVMediaSourcePreview> AVVideoSourcePreview::create(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
{
    return adoptRef(new AVVideoSourcePreview(session, device, parent));
}

AVVideoSourcePreview::AVVideoSourcePreview(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
    : AVMediaSourcePreview(parent)
    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] init]))
{
    m_device = device;
    m_previewLayer = adoptNS([allocAVCaptureVideoPreviewLayerInstance() initWithSession:session]);
    m_previewLayer.get().contentsGravity = kCAGravityResize;
    m_previewLayer.get().anchorPoint = CGPointZero;
    [m_previewLayer.get() setDelegate:[WebActionDisablingCALayerDelegate shared]];

    m_previewBackgroundLayer = adoptNS([[CALayer alloc] init]);
    m_previewBackgroundLayer.get().contentsGravity = kCAGravityResizeAspect;
    m_previewBackgroundLayer.get().anchorPoint = CGPointZero;
    m_previewBackgroundLayer.get().needsDisplayOnBoundsChange = YES;
    [m_previewBackgroundLayer.get() setDelegate:[WebActionDisablingCALayerDelegate shared]];

#ifndef NDEBUG
    m_previewLayer.get().name = @"AVVideoCaptureSource preview layer";
    m_previewBackgroundLayer.get().name = @"AVVideoSourcePreview parent layer";
#endif

    [m_previewBackgroundLayer addSublayer:m_previewLayer.get()];

    [m_objcObserver.get() setParent:this];
}

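// Invoked by the bounds KVO observer below whenever the background layer is resized, so the
// preview layer always matches its parent layer's size.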
void AVVideoSourcePreview::backgroundLayerBoundsChanged()
{
    if (m_previewBackgroundLayer && m_previewLayer)
        [m_previewLayer.get() setBounds:m_previewBackgroundLayer.get().bounds];
}

void AVVideoSourcePreview::invalidate()
{
    [m_objcObserver.get() setParent:nil];
    m_objcObserver = nullptr;
    m_previewLayer = nullptr;
    m_previewBackgroundLayer = nullptr;
    m_device = nullptr;
    AVMediaSourcePreview::invalidate();
}

void AVVideoSourcePreview::play() const
{
    setPaused(false);
}

void AVVideoSourcePreview::pause() const
{
    setPaused(true);
}

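// Pause and resume by toggling the preview layer's capture connection while holding the device
// configuration lock.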
void AVVideoSourcePreview::setPaused(bool paused) const
{
    [m_device lockForConfiguration:nil];
    m_previewLayer.get().connection.enabled = !paused;
    [m_device unlockForConfiguration];
}

void AVVideoSourcePreview::setEnabled(bool enabled)
{
    m_previewLayer.get().hidden = !enabled;
}

const OSType videoCaptureFormat = kCVPixelFormatType_32BGRA;

RefPtr<AVMediaCaptureSource> AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString& id, const MediaConstraints* constraints, String& invalidConstraint)
{
    auto source = adoptRef(new AVVideoCaptureSource(device, id));
    if (constraints) {
        auto result = source->applyConstraints(*constraints);
        if (result) {
            invalidConstraint = result.value().first;
            source = nullptr;
        }
    }

    return source;
}

AVVideoCaptureSource::AVVideoCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
    : AVMediaCaptureSource(device, id, RealtimeMediaSource::Video)
{
}

AVVideoCaptureSource::~AVVideoCaptureSource()
{
}

static void updateSizeMinMax(int& min, int& max, int value)
{
    min = std::min<int>(min, value);
    max = std::max<int>(max, value);
}

static void updateAspectRatioMinMax(double& min, double& max, double value)
{
    min = std::min<double>(min, value);
    max = std::max<double>(max, value);
}

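// Build capability ranges by scanning every device format for its supported frame rates and by
// probing the fixed set of session presets for supported dimensions and aspect ratios.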
void AVVideoCaptureSource::initializeCapabilities(RealtimeMediaSourceCapabilities& capabilities)
{
    AVCaptureDeviceTypedef *videoDevice = device();

    if ([videoDevice position] == AVCaptureDevicePositionFront)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::User);
    if ([videoDevice position] == AVCaptureDevicePositionBack)
        capabilities.addFacingMode(RealtimeMediaSourceSettings::Environment);

    Float64 lowestFrameRateRange = std::numeric_limits<double>::infinity();
    Float64 highestFrameRateRange = 0;
    int minimumWidth = std::numeric_limits<int>::max();
    int maximumWidth = 0;
    int minimumHeight = std::numeric_limits<int>::max();
    int maximumHeight = 0;
    double minimumAspectRatio = std::numeric_limits<double>::infinity();
    double maximumAspectRatio = 0;

    for (AVCaptureDeviceFormatType *format in [videoDevice formats]) {

        for (AVFrameRateRangeType *range in [format videoSupportedFrameRateRanges]) {
            lowestFrameRateRange = std::min<Float64>(lowestFrameRateRange, range.minFrameRate);
            highestFrameRateRange = std::max<Float64>(highestFrameRateRange, range.maxFrameRate);
        }

        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 1280);
            updateSizeMinMax(minimumHeight, maximumHeight, 720);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 1280.0 / 720);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 960);
            updateSizeMinMax(minimumHeight, maximumHeight, 540);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 960.0 / 540);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 640);
            updateSizeMinMax(minimumHeight, maximumHeight, 480);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 640.0 / 480);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 352);
            updateSizeMinMax(minimumHeight, maximumHeight, 288);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 352.0 / 288);
        }
        if ([videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240]) {
            updateSizeMinMax(minimumWidth, maximumWidth, 320);
            updateSizeMinMax(minimumHeight, maximumHeight, 240);
            updateAspectRatioMinMax(minimumAspectRatio, maximumAspectRatio, 320.0 / 240);
        }
    }

    capabilities.setFrameRate(CapabilityValueOrRange(lowestFrameRateRange, highestFrameRateRange));
    capabilities.setWidth(CapabilityValueOrRange(minimumWidth, maximumWidth));
    capabilities.setHeight(CapabilityValueOrRange(minimumHeight, maximumHeight));
    capabilities.setAspectRatio(CapabilityValueOrRange(minimumAspectRatio, maximumAspectRatio));
}

void AVVideoCaptureSource::initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints& supportedConstraints)
{
    supportedConstraints.setSupportsFacingMode([device() position] != AVCaptureDevicePositionUnspecified);
    supportedConstraints.setSupportsWidth(true);
    supportedConstraints.setSupportsHeight(true);
    supportedConstraints.setSupportsAspectRatio(true);
    supportedConstraints.setSupportsFrameRate(true);
}

void AVVideoCaptureSource::updateSettings(RealtimeMediaSourceSettings& settings)
{
    settings.setDeviceId(id());

    if ([device() position] == AVCaptureDevicePositionFront)
        settings.setFacingMode(RealtimeMediaSourceSettings::User);
    else if ([device() position] == AVCaptureDevicePositionBack)
        settings.setFacingMode(RealtimeMediaSourceSettings::Environment);
    else
        settings.setFacingMode(RealtimeMediaSourceSettings::Unknown);

    settings.setFrameRate(m_frameRate);
    settings.setWidth(m_width);
    settings.setHeight(m_height);
    settings.setAspectRatio(static_cast<float>(m_width) / m_height);
}

bool AVVideoCaptureSource::applySize(const IntSize& size)
{
    NSString *preset = bestSessionPresetForVideoDimensions(size.width(), size.height());
    if (!preset || ![session() canSetSessionPreset:preset]) {
        LOG(Media, "AVVideoCaptureSource::applySize(%p), unable to find or set preset for width: %i, height: %i", this, size.width(), size.height());
        return false;
    }

    return setPreset(preset);
}

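// If the capture session does not exist yet, remember the preset so setupCaptureSession() can
// apply it once the session is created.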
bool AVVideoCaptureSource::setPreset(NSString *preset)
{
    if (!session()) {
        m_pendingPreset = preset;
        return true;
    }
    m_pendingPreset = nullptr;
    if (!preset)
        return true;

    @try {
        session().sessionPreset = preset;
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::setPreset(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    return true;
}

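// Select the supported frame rate range that contains the requested rate, preferring the range
// with the largest minimum frame duration, then apply it under the device configuration lock.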
bool AVVideoCaptureSource::applyFrameRate(double rate)
{
    AVFrameRateRangeType *bestFrameRateRange = nil;
    for (AVFrameRateRangeType *frameRateRange in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= [frameRateRange minFrameRate] && rate <= [frameRateRange maxFrameRate]) {
            if (!bestFrameRateRange || CMTIME_COMPARE_INLINE([frameRateRange minFrameDuration], >, [bestFrameRateRange minFrameDuration]))
                bestFrameRateRange = frameRateRange;
        }
    }

    if (!bestFrameRateRange) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), frame rate %f not supported by video device", this, rate);
        return false;
    }

    NSError *error = nil;
    @try {
        if ([device() lockForConfiguration:&error]) {
            [device() setActiveVideoMinFrameDuration:[bestFrameRateRange minFrameDuration]];
            [device() unlockForConfiguration];
        }
    } @catch(NSException *exception) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), exception thrown configuring device: <%s> %s", this, [[exception name] UTF8String], [[exception reason] UTF8String]);
        return false;
    }

    if (error) {
        LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p), failed to lock video device for configuration: %s", this, [[error localizedDescription] UTF8String]);
        return false;
    }

    LOG(Media, "AVVideoCaptureSource::applyFrameRate(%p) - set frame rate range to %f", this, rate);
    return true;
}

void AVVideoCaptureSource::applySizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    setPreset(bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)));

    if (frameRate)
        applyFrameRate(frameRate.value());
}

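// Build the session graph: an input for the capture device and a video data output configured to
// deliver BGRA sample buffers to this object's sample buffer delegate.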
void AVVideoCaptureSource::setupCaptureSession()
{
    if (m_pendingPreset)
        setPreset(m_pendingPreset.get());

    NSError *error = nil;
    RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
    if (error) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
        return;
    }

    if (![session() canAddInput:videoIn.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
        return;
    }
    [session() addInput:videoIn.get()];

    RetainPtr<AVCaptureVideoDataOutputType> videoOutput = adoptNS([allocAVCaptureVideoDataOutputInstance() init]);
    RetainPtr<NSDictionary> settingsDictionary = adoptNS([[NSDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:videoCaptureFormat], kCVPixelBufferPixelFormatTypeKey, nil]);
    [videoOutput setVideoSettings:settingsDictionary.get()];
    [videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    setVideoSampleBufferDelegate(videoOutput.get());

    if (![session() canAddOutput:videoOutput.get()]) {
        LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
        return;
    }
    [session() addOutput:videoOutput.get()];
}

void AVVideoCaptureSource::shutdownCaptureSession()
{
    m_buffer = nullptr;
    m_lastImage = nullptr;
    m_videoFrameTimeStamps.clear();
    m_frameRate = 0;
    m_width = 0;
    m_height = 0;
}

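// Estimate the delivered frame rate from a sliding one-second window of presentation timestamps,
// averaging with the previous estimate to smooth jitter. Returns true if the estimate changed.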
bool AVVideoCaptureSource::updateFramerate(CMSampleBufferRef sampleBuffer)
{
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!CMTIME_IS_NUMERIC(sampleTime))
        return false;

    Float64 frameTime = CMTimeGetSeconds(sampleTime);
    Float64 oneSecondAgo = frameTime - 1;

    m_videoFrameTimeStamps.append(frameTime);

    while (m_videoFrameTimeStamps[0] < oneSecondAgo)
        m_videoFrameTimeStamps.remove(0);

    Float64 frameRate = m_frameRate;
    m_frameRate = (m_frameRate + m_videoFrameTimeStamps.size()) / 2;

    return frameRate != m_frameRate;
}

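// Copy the incoming sample buffer, mark it for immediate display, and make it the current frame,
// updating the reported width and height if the video dimensions changed.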
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer)
{
    // Ignore frames delivered when the session is not running; we want to hang onto the last
    // image delivered before it stopped.
    if (m_lastImage && (!isProducingData() || muted()))
        return;

    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
    if (!formatDescription)
        return;

    updateFramerate(sampleBuffer.get());

    CMSampleBufferRef newSampleBuffer = nullptr;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer.get(), &newSampleBuffer);
    ASSERT(newSampleBuffer);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    if (attachmentsArray) {
        for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        }
    }

    m_buffer = adoptCF(newSampleBuffer);
    m_lastImage = nullptr;

    bool settingsChanged = false;
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
    if (dimensions.width != m_width || dimensions.height != m_height) {
        m_width = dimensions.width;
        m_height = dimensions.height;
        settingsChanged = true;
    }

    if (settingsChanged)
        settingsDidChange();

    mediaDataUpdated(MediaSampleAVFObjC::create(m_buffer.get()));
}

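// Sample buffer delegate callback; retain the buffer and hand processing off to
// scheduleDeferredTask() rather than doing it inline in the delegate callback.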
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)
{
    RetainPtr<CMSampleBufferRef> buffer = sampleBuffer;

    scheduleDeferredTask([this, buffer] {
        this->processNewFrame(buffer);
    });
}

RefPtr<Image> AVVideoCaptureSource::currentFrameImage()
{
    if (!currentFrameCGImage())
        return nullptr;

    FloatRect imageRect(0, 0, m_width, m_height);
    std::unique_ptr<ImageBuffer> imageBuffer = ImageBuffer::create(imageRect.size(), Unaccelerated);

    if (!imageBuffer)
        return nullptr;

    paintCurrentFrameInContext(imageBuffer->context(), imageRect);

    return ImageBuffer::sinkIntoImage(WTFMove(imageBuffer));
}

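// Lazily wrap the current BGRA pixel buffer in a CGImage, caching the result until the next
// frame replaces m_buffer.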
RetainPtr<CGImageRef> AVVideoCaptureSource::currentFrameCGImage()
{
    if (m_lastImage)
        return m_lastImage;

    if (!m_buffer)
        return nullptr;

    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_buffer.get()));
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == videoCaptureFormat);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateWithData(NULL, baseAddress, bytesPerRow * height, NULL));
    m_lastImage = adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, sRGBColorSpaceRef(), kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, provider.get(), NULL, true, kCGRenderingIntentDefault));

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    return m_lastImage;
}

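// Paint the current frame, flipping the context vertically because CGContextDrawImage draws with
// a bottom-left origin.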
void AVVideoCaptureSource::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled() || !currentFrameCGImage())
        return;

    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1, -1));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), m_lastImage.get());
}

RefPtr<AVMediaSourcePreview> AVVideoCaptureSource::createPreview()
{
    return AVVideoSourcePreview::create(session(), device(), this);
}

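// Map the requested dimensions to the matching capture session preset; a missing width or height
// acts as a wildcard.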
NSString *AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional<int> width, std::optional<int> height) const
{
    if (!width && !height)
        return nil;

    AVCaptureDeviceTypedef *videoDevice = device();
    if ((!width || width.value() == 1280) && (!height || height.value() == 720))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720] ? AVCaptureSessionPreset1280x720 : nil;

    if ((!width || width.value() == 960) && (!height || height.value() == 540))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset960x540] ? AVCaptureSessionPreset960x540 : nil;

    if ((!width || width.value() == 640) && (!height || height.value() == 480))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset640x480] ? AVCaptureSessionPreset640x480 : nil;

    if ((!width || width.value() == 352) && (!height || height.value() == 288))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset352x288] ? AVCaptureSessionPreset352x288 : nil;

    if ((!width || width.value() == 320) && (!height || height.value() == 240))
        return [videoDevice supportsAVCaptureSessionPreset:AVCaptureSessionPreset320x240] ? AVCaptureSessionPreset320x240 : nil;

    return nil;
}

bool AVVideoCaptureSource::supportsSizeAndFrameRate(std::optional<int> width, std::optional<int> height, std::optional<double> frameRate)
{
    if (!height && !width && !frameRate)
        return true;

    if ((height || width) && !bestSessionPresetForVideoDimensions(WTFMove(width), WTFMove(height)))
        return false;

    if (!frameRate)
        return true;

    double rate = frameRate.value();
    for (AVFrameRateRangeType *range in [[device() activeFormat] videoSupportedFrameRateRanges]) {
        if (rate >= range.minFrameRate && rate <= range.maxFrameRate)
            return true;
    }

    return false;
}

} // namespace WebCore

@implementation WebCoreAVVideoCaptureSourceObserver

static NSString * const KeyValueBoundsChangeKey = @"bounds";

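// Add or remove this object as a KVO observer of the new parent's background layer bounds.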
- (void)setParent:(AVVideoSourcePreview *)parent
{
    if (_parent && _hasObserver && _parent->platformLayer()) {
        _hasObserver = false;
        [_parent->platformLayer() removeObserver:self forKeyPath:KeyValueBoundsChangeKey];
    }

    _parent = parent;

    if (_parent && _parent->platformLayer()) {
        _hasObserver = true;
        [_parent->platformLayer() addObserver:self forKeyPath:KeyValueBoundsChangeKey options:0 context:nullptr];
    }
}

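// A bounds change on the background layer triggers a resize of the preview layer; on iOS the
// work is dispatched to the WebThread.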
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    UNUSED_PARAM(context);

    if (!_parent)
        return;

    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
        return;

#if PLATFORM(IOS)
    WebThreadRun(^ {
        if ([keyPath isEqual:KeyValueBoundsChangeKey] && object == _parent->platformLayer())
            _parent->backgroundLayerBoundsChanged();
    });
#else
    if ([keyPath isEqual:KeyValueBoundsChangeKey] && object == _parent->platformLayer())
        _parent->backgroundLayerBoundsChanged();
#endif
}

@end

#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)