/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaSampleAVFObjC.h"

#import "PixelBufferConformerCV.h"
#import <runtime/JSCInlines.h>
#import <runtime/TypedArrayInlines.h>
#import <wtf/PrintStream.h>

#import "CoreMediaSoftLink.h"
#import "CoreVideoSoftLink.h"

namespace WebCore {

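// Byte-release callback handed to CVPixelBufferCreateWithBytes() below: frees the heap
// buffer that was detached from the source Vector once CoreVideo is finished with the pixels.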
static inline void releaseUint8Vector(void* array, const void*)
{
    adoptMallocPtr(static_cast<uint8_t*>(array));
}

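// Wraps caller-provided 32BGRA pixel data in a CVPixelBuffer that takes ownership of the
// Vector's backing store, builds a ready CMSampleBuffer around it with invalid timing, and
// marks every sample attachment for immediate display.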
RefPtr<MediaSampleAVFObjC> MediaSampleAVFObjC::createImageSample(Vector<uint8_t>&& array, unsigned long width, unsigned long height)
{
    // Detach the Vector's storage up front so the baseAddress and the releaseRefCon passed
    // to CVPixelBufferCreateWithBytes() are guaranteed to refer to the same buffer;
    // releaseUint8Vector() frees it when the pixel buffer is destroyed.
    uint8_t* data = array.releaseBuffer().leakPtr();

    CVPixelBufferRef rawImageBuffer = nullptr;
    auto status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, data, width * 4, releaseUint8Vector, data, nullptr, &rawImageBuffer);

    ASSERT_UNUSED(status, !status);
    if (!rawImageBuffer)
        return nullptr;
    auto imageBuffer = adoptCF(rawImageBuffer);

    CMVideoFormatDescriptionRef rawFormatDescription = nullptr;
    status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer.get(), &rawFormatDescription);
    ASSERT(!status);
    auto formatDescription = adoptCF(rawFormatDescription);

    CMSampleTimingInfo sampleTimingInformation = { kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid };

    CMSampleBufferRef rawSample = nullptr;
    status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer.get(), formatDescription.get(), &sampleTimingInformation, &rawSample);
    ASSERT(!status);
    auto sample = adoptCF(rawSample);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sample.get(), true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    }
    return create(sample.get());
}

MediaTime MediaSampleAVFObjC::presentationTime() const
{
    return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputPresentationTime() const
{
    return toMediaTime(CMSampleBufferGetOutputPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::decodeTime() const
{
    return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::duration() const
{
    return toMediaTime(CMSampleBufferGetDuration(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputDuration() const
{
    return toMediaTime(CMSampleBufferGetOutputDuration(m_sample.get()));
}

size_t MediaSampleAVFObjC::sizeInBytes() const
{
    return CMSampleBufferGetTotalSampleSize(m_sample.get());
}

PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return sample;
}

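// A sample buffer is treated as a random-access (sync) point unless any of its samples
// carries kCMSampleAttachmentKey_NotSync, and as non-displaying if any sample carries
// kCMSampleAttachmentKey_DoNotDisplay; flags() below maps both onto MediaSample::SampleFlags.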
static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
            return false;
    }
    return true;
}

static bool CMSampleBufferIsNonDisplaying(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return false;

    for (CFIndex i = 0; i < CFArrayGetCount(attachments); ++i) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_DoNotDisplay))
            return true;
    }

    return false;
}

MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    int returnValue = MediaSample::None;

    if (CMSampleBufferIsRandomAccess(m_sample.get()))
        returnValue |= MediaSample::IsSync;

    if (CMSampleBufferIsNonDisplaying(m_sample.get()))
        returnValue |= MediaSample::IsNonDisplaying;

    return SampleFlags(returnValue);
}

FloatSize MediaSampleAVFObjC::presentationSize() const
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(m_sample.get());
    if (CMFormatDescriptionGetMediaType(formatDescription) != kCMMediaType_Video)
        return FloatSize();

    return FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
}

void MediaSampleAVFObjC::dump(PrintStream& out) const
{
    out.print("{PTS(", presentationTime(), "), OPTS(", outputPresentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize().width(), "x", presentationSize().height(), ")}");
}

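// Shifts every sample's presentation and decode timestamps by the given offset. The timing
// array is rewritten into a copy made with CMSampleBufferCreateCopyWithNewTiming(), which
// then replaces m_sample.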
void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = toCMTime(toMediaTime(timing.presentationTimeStamp) + offset);
        timing.decodeTimeStamp = toCMTime(toMediaTime(timing.decodeTimeStamp) + offset);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

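// Like offsetTimestampsBy(), but overwrites every sample's presentation and decode
// timestamps with the given values via a retimed copy of the sample buffer.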
void MediaSampleAVFObjC::setTimestamps(const WTF::MediaTime& presentationTimestamp, const WTF::MediaTime& decodeTimestamp)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = toCMTime(presentationTimestamp);
        timing.decodeTimeStamp = toCMTime(decodeTimestamp);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

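// A sample buffer can only be divided if it contains more than one sample and exposes
// per-sample sizes.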
bool MediaSampleAVFObjC::isDivisable() const
{
    if (CMSampleBufferGetNumSamples(m_sample.get()) == 1)
        return false;

    if (CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, nullptr) == kCMSampleBufferError_BufferHasNoSampleSizes)
        return false;

    return true;
}

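// Splits the buffer at the given presentation time: samples strictly before it go into the
// first returned sample, the rest into the second. The enumeration callback stops early by
// returning a non-noErr status once the boundary is reached.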
std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> MediaSampleAVFObjC::divide(const MediaTime& presentationTime)
{
    if (!isDivisable())
        return { nullptr, nullptr };

    CFIndex samplesBeforePresentationTime = 0;

    CMSampleBufferCallBlockForEachSample(m_sample.get(), [&] (CMSampleBufferRef sampleBuffer, CMItemCount) -> OSStatus {
        if (toMediaTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) >= presentationTime)
            return 1;
        ++samplesBeforePresentationTime;
        return noErr;
    });

    if (!samplesBeforePresentationTime)
        return { nullptr, this };

    CMItemCount sampleCount = CMSampleBufferGetNumSamples(m_sample.get());
    if (samplesBeforePresentationTime >= sampleCount)
        return { this, nullptr };

    CMSampleBufferRef rawSampleBefore = nullptr;
    CFRange rangeBefore = CFRangeMake(0, samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeBefore, &rawSampleBefore) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleBefore = adoptCF(rawSampleBefore);

    CMSampleBufferRef rawSampleAfter = nullptr;
    CFRange rangeAfter = CFRangeMake(samplesBeforePresentationTime, sampleCount - samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeAfter, &rawSampleAfter) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleAfter = adoptCF(rawSampleAfter);

    return { MediaSampleAVFObjC::create(sampleBefore.get(), m_id), MediaSampleAVFObjC::create(sampleAfter.get(), m_id) };
}

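// Copies the sample buffer and tags every sample with kCMSampleAttachmentKey_DoNotDisplay,
// so the copy can be decoded without being shown.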
Ref<MediaSample> MediaSampleAVFObjC::createNonDisplayingCopy() const
{
    CMSampleBufferRef newSampleBuffer = nullptr;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, m_sample.get(), &newSampleBuffer);
    ASSERT(newSampleBuffer);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    return MediaSampleAVFObjC::create(adoptCF(newSampleBuffer).get(), m_id);
}

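// Conforms the sample's image buffer to 32RGBA with PixelBufferConformerCV and returns the
// converted pixels copied into a Uint8ClampedArray. Note that byteLength is derived from the
// source buffer's dimensions, so the conversion is assumed to preserve size and produce
// tightly packed rows.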
RefPtr<JSC::Uint8ClampedArray> MediaSampleAVFObjC::getRGBAImageData() const
{
    const OSType imageFormat = kCVPixelFormatType_32RGBA;
    RetainPtr<CFNumberRef> imageFormatNumber = adoptCF(CFNumberCreate(nullptr, kCFNumberIntType, &imageFormat));

    RetainPtr<CFMutableDictionaryRef> conformerOptions = adoptCF(CFDictionaryCreateMutable(0, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    CFDictionarySetValue(conformerOptions.get(), kCVPixelBufferPixelFormatTypeKey, imageFormatNumber.get());
    PixelBufferConformerCV pixelBufferConformer(conformerOptions.get());

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_sample.get()));
    auto rgbaPixelBuffer = pixelBufferConformer.convert(pixelBuffer);
    auto status = CVPixelBufferLockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    void* data = CVPixelBufferGetBaseAddressOfPlane(rgbaPixelBuffer.get(), 0);
    size_t byteLength = CVPixelBufferGetHeight(pixelBuffer) * CVPixelBufferGetWidth(pixelBuffer) * 4;
    auto result = JSC::Uint8ClampedArray::create(JSC::ArrayBuffer::create(data, byteLength), 0, byteLength);

    status = CVPixelBufferUnlockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    return result;
}

}