/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaSampleAVFObjC.h"

#import "PixelBufferConformerCV.h"
#import <JavaScriptCore/JSCInlines.h>
#import <JavaScriptCore/TypedArrayInlines.h>
#import <wtf/PrintStream.h>
#import <wtf/cf/TypeCastsCF.h>

#import "CoreVideoSoftLink.h"
#import <pal/cf/CoreMediaSoftLink.h>

using namespace PAL;

WTF_DECLARE_CF_TYPE_TRAIT(CMSampleBuffer);

namespace WebCore {

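// Release callback passed to CVPixelBufferCreateWithBytes() below. The pixel buffer wraps the
// caller's bytes without copying them, so when the buffer is destroyed we re-adopt and free the
// malloc'd storage that createImageSample() leaked into it.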
static inline void releaseUint8Vector(void* array, const void*)
{
    adoptMallocPtr(static_cast<uint8_t*>(array));
}

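// Wraps raw 32BGRA pixel data in a CVPixelBuffer and then in a single-sample CMSampleBuffer with
// invalid timing and the DisplayImmediately attachment set, so an enqueuing layer will show the
// frame as soon as it arrives. Returns nullptr if the pixel buffer cannot be created.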
RefPtr<MediaSampleAVFObjC> MediaSampleAVFObjC::createImageSample(Vector<uint8_t>&& array, unsigned long width, unsigned long height)
{
    CVPixelBufferRef pixelBuffer = nullptr;
    // Capture the data pointer before releaseBuffer() runs; argument evaluation order is unspecified.
    auto* data = array.data();
    auto status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, data, width * 4, releaseUint8Vector, array.releaseBuffer().leakPtr(), nullptr, &pixelBuffer);
    auto imageBuffer = adoptCF(pixelBuffer);

    ASSERT_UNUSED(status, !status);
    if (!imageBuffer)
        return nullptr;

    CMVideoFormatDescriptionRef formatDescription = nullptr;
    status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer.get(), &formatDescription);
    ASSERT(!status);

    CMSampleTimingInfo sampleTimingInformation = { kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid };

    CMSampleBufferRef sampleBuffer;
    status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer.get(), formatDescription, &sampleTimingInformation, &sampleBuffer);
    CFRelease(formatDescription);
    ASSERT(!status);

    auto sample = adoptCF(sampleBuffer);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sample.get(), true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = checked_cf_cast<CFMutableDictionaryRef>(CFArrayGetValueAtIndex(attachmentsArray, i));
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    }
    return create(sample.get());
}

MediaTime MediaSampleAVFObjC::presentationTime() const
{
    return PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputPresentationTime() const
{
    return PAL::toMediaTime(CMSampleBufferGetOutputPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::decodeTime() const
{
    return PAL::toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::duration() const
{
    return PAL::toMediaTime(CMSampleBufferGetDuration(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputDuration() const
{
    return PAL::toMediaTime(CMSampleBufferGetOutputDuration(m_sample.get()));
}

size_t MediaSampleAVFObjC::sizeInBytes() const
{
    return CMSampleBufferGetTotalSampleSize(m_sample.get());
}

PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return sample;
}

uint32_t MediaSampleAVFObjC::videoPixelFormat() const
{
    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_sample.get()));
    return CVPixelBufferGetPixelFormatType(pixelBuffer);
}

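// A sample buffer is a random access point unless at least one of its samples carries the
// kCMSampleAttachmentKey_NotSync attachment; a missing attachments array means every sample is sync.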
static bool isCMSampleBufferRandomAccess(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
        CFDictionaryRef attachmentDict = checked_cf_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, i));
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
            return false;
    }
    return true;
}

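// A sample buffer is non-displaying if any of its samples carries the kCMSampleAttachmentKey_DoNotDisplay
// attachment; a missing attachments array means every sample is displayable.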
static bool isCMSampleBufferNonDisplaying(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return false;

    for (CFIndex i = 0; i < CFArrayGetCount(attachments); ++i) {
        CFDictionaryRef attachmentDict = checked_cf_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(attachments, i));
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_DoNotDisplay))
            return true;
    }

    return false;
}

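// Maps the CoreMedia sample attachments onto the cross-platform MediaSample flag bits.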
MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    int returnValue = MediaSample::None;

    if (isCMSampleBufferRandomAccess(m_sample.get()))
        returnValue |= MediaSample::IsSync;

    if (isCMSampleBufferNonDisplaying(m_sample.get()))
        returnValue |= MediaSample::IsNonDisplaying;

    return SampleFlags(returnValue);
}

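// Returns the displayed size of a video sample, honoring pixel aspect ratio and clean aperture;
// non-video samples report an empty size.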
FloatSize MediaSampleAVFObjC::presentationSize() const
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(m_sample.get());
    if (CMFormatDescriptionGetMediaType(formatDescription) != kCMMediaType_Video)
        return FloatSize();

    return FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
}

void MediaSampleAVFObjC::dump(PrintStream& out) const
{
    out.print("{PTS(", presentationTime(), "), OPTS(", outputPresentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize().width(), "x", presentationSize().height(), ")}");
}

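// Replaces m_sample with a copy whose per-sample presentation and decode timestamps are shifted by
// the given offset. On any CoreMedia failure the original sample is left untouched.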
void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = PAL::toCMTime(PAL::toMediaTime(timing.presentationTimeStamp) + offset);
        timing.decodeTimeStamp = PAL::toCMTime(PAL::toMediaTime(timing.decodeTimeStamp) + offset);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

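// Replaces m_sample with a copy whose per-sample timing uses the given presentation and decode
// timestamps. On any CoreMedia failure the original sample is left untouched.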
void MediaSampleAVFObjC::setTimestamps(const WTF::MediaTime& presentationTimestamp, const WTF::MediaTime& decodeTimestamp)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = PAL::toCMTime(presentationTimestamp);
        timing.decodeTimeStamp = PAL::toCMTime(decodeTimestamp);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

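// A sample buffer can only be divided if it holds more than one sample and exposes per-sample sizes.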
bool MediaSampleAVFObjC::isDivisable() const
{
    if (CMSampleBufferGetNumSamples(m_sample.get()) == 1)
        return false;

    if (CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, nullptr) == kCMSampleBufferError_BufferHasNoSampleSizes)
        return false;

    return true;
}

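// Splits the buffer at the given presentation time: the first half holds the samples that precede
// that time, the second half holds the rest. One side is null when the split point falls at either
// end of the buffer, and both sides are null on failure.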
std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> MediaSampleAVFObjC::divide(const MediaTime& presentationTime)
{
    if (!isDivisable())
        return { nullptr, nullptr };

    CFIndex samplesBeforePresentationTime = 0;

    CMSampleBufferCallBlockForEachSample(m_sample.get(), [&] (CMSampleBufferRef sampleBuffer, CMItemCount) -> OSStatus {
        if (PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) >= presentationTime)
            return 1;
        ++samplesBeforePresentationTime;
        return noErr;
    });

    if (!samplesBeforePresentationTime)
        return { nullptr, this };

    CMItemCount sampleCount = CMSampleBufferGetNumSamples(m_sample.get());
    if (samplesBeforePresentationTime >= sampleCount)
        return { this, nullptr };

    CMSampleBufferRef rawSampleBefore = nullptr;
    CFRange rangeBefore = CFRangeMake(0, samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeBefore, &rawSampleBefore) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleBefore = adoptCF(rawSampleBefore);

    CMSampleBufferRef rawSampleAfter = nullptr;
    CFRange rangeAfter = CFRangeMake(samplesBeforePresentationTime, sampleCount - samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeAfter, &rawSampleAfter) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleAfter = adoptCF(rawSampleAfter);

    return { MediaSampleAVFObjC::create(sampleBefore.get(), m_id), MediaSampleAVFObjC::create(sampleAfter.get(), m_id) };
}

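// Copies the sample buffer and marks every sample with kCMSampleAttachmentKey_DoNotDisplay so the
// copy can be decoded (for example to prime a decoder) without ever being shown.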
Ref<MediaSample> MediaSampleAVFObjC::createNonDisplayingCopy() const
{
    CMSampleBufferRef newSampleBuffer = nullptr;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, m_sample.get(), &newSampleBuffer);
    ASSERT(newSampleBuffer);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = checked_cf_cast<CFMutableDictionaryRef>(CFArrayGetValueAtIndex(attachmentsArray, i));
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    return MediaSampleAVFObjC::create(adoptCF(newSampleBuffer).get(), m_id);
}

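// Converts the sample's image buffer to 32RGBA via PixelBufferConformerCV and copies the pixels
// into a Uint8ClampedArray suitable for exposing the frame to script.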
RefPtr<JSC::Uint8ClampedArray> MediaSampleAVFObjC::getRGBAImageData() const
{
#if HAVE(CORE_VIDEO)
    const OSType imageFormat = kCVPixelFormatType_32RGBA;
    RetainPtr<CFNumberRef> imageFormatNumber = adoptCF(CFNumberCreate(nullptr, kCFNumberIntType, &imageFormat));

    RetainPtr<CFMutableDictionaryRef> conformerOptions = adoptCF(CFDictionaryCreateMutable(0, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    CFDictionarySetValue(conformerOptions.get(), kCVPixelBufferPixelFormatTypeKey, imageFormatNumber.get());
    PixelBufferConformerCV pixelBufferConformer(conformerOptions.get());

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_sample.get()));
    auto rgbaPixelBuffer = pixelBufferConformer.convert(pixelBuffer);
    auto status = CVPixelBufferLockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    void* data = CVPixelBufferGetBaseAddressOfPlane(rgbaPixelBuffer.get(), 0);
    size_t byteLength = CVPixelBufferGetHeight(pixelBuffer) * CVPixelBufferGetWidth(pixelBuffer) * 4;
    auto result = JSC::Uint8ClampedArray::tryCreate(JSC::ArrayBuffer::create(data, byteLength), 0, byteLength);

    status = CVPixelBufferUnlockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    return result;
#else
    return nullptr;
#endif
}

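// Serializes the sample's timing, flags and presentation size as JSON, primarily for logging.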
String MediaSampleAVFObjC::toJSONString() const
{
    auto object = JSON::Object::create();

    object->setObject("pts"_s, presentationTime().toJSONObject());
    object->setObject("opts"_s, outputPresentationTime().toJSONObject());
    object->setObject("dts"_s, decodeTime().toJSONObject());
    object->setObject("duration"_s, duration().toJSONObject());
    object->setInteger("flags"_s, static_cast<unsigned>(flags()));
    object->setObject("presentationSize"_s, presentationSize().toJSONObject());

    return object->toJSONString();
}

} // namespace WebCore