Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm
/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaSampleAVFObjC.h"

#import "PixelBufferConformerCV.h"
#import <runtime/TypedArrayInlines.h>
#import <wtf/PrintStream.h>

#import "CoreMediaSoftLink.h"
#import "CoreVideoSoftLink.h"
namespace WebCore {

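// Release callback passed to CVPixelBufferCreateWithBytes(); frees the malloc'd
// pixel storage (handed over via leakPtr() below) once CoreVideo is done with it.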
static inline void releaseUint8Vector(void *array, const void*)
{
    adoptMallocPtr(static_cast<uint8_t*>(array));
}

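// Wraps a tightly packed 32BGRA pixel array (width * 4 bytes per row) in a
// CVPixelBuffer without copying, then builds a ready CMSampleBuffer with
// invalid timing and the DisplayImmediately attachment set on every sample.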
RefPtr<MediaSampleAVFObjC> MediaSampleAVFObjC::createImageSample(Vector<uint8_t>&& array, unsigned long width, unsigned long height)
{
    // Take ownership of the vector's storage before building the argument list:
    // evaluating array.data() and array.releaseBuffer() as sibling arguments has
    // unspecified order, and releaseBuffer() leaves the vector empty.
    auto buffer = array.releaseBuffer();
    uint8_t* data = buffer.get();

    CVPixelBufferRef rawImageBuffer = nullptr;
    auto status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, data, width * 4, releaseUint8Vector, buffer.leakPtr(), nullptr, &rawImageBuffer);

    ASSERT_UNUSED(status, !status);
    if (!rawImageBuffer)
        return nullptr;
    auto imageBuffer = adoptCF(rawImageBuffer);

    CMVideoFormatDescriptionRef rawFormatDescription = nullptr;
    status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer.get(), &rawFormatDescription);
    ASSERT(!status);
    auto formatDescription = adoptCF(rawFormatDescription);

    CMSampleTimingInfo sampleTimingInformation = { kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid };

    CMSampleBufferRef rawSample = nullptr;
    status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer.get(), formatDescription.get(), &sampleTimingInformation, &rawSample);
    ASSERT(!status);
    auto sample = adoptCF(rawSample);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sample.get(), true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    }
    return create(sample.get());
}

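// The accessors below forward to the underlying CMSampleBuffer, converting
// CMTime values to MediaTime where needed.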
MediaTime MediaSampleAVFObjC::presentationTime() const
{
    return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputPresentationTime() const
{
    return toMediaTime(CMSampleBufferGetOutputPresentationTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::decodeTime() const
{
    return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::duration() const
{
    return toMediaTime(CMSampleBufferGetDuration(m_sample.get()));
}

MediaTime MediaSampleAVFObjC::outputDuration() const
{
    return toMediaTime(CMSampleBufferGetOutputDuration(m_sample.get()));
}

size_t MediaSampleAVFObjC::sizeInBytes() const
{
    return CMSampleBufferGetTotalSampleSize(m_sample.get());
}

PlatformSample MediaSampleAVFObjC::platformSample()
{
    PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
    return sample;
}

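// A sample buffer is treated as a random-access (sync) point unless any of its
// samples carries the kCMSampleAttachmentKey_NotSync attachment; a missing
// attachments array means every sample is sync.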
static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return true;

    for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
            return false;
    }
    return true;
}

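// A sample buffer is non-displaying if any of its samples carries the
// kCMSampleAttachmentKey_DoNotDisplay attachment.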
static bool CMSampleBufferIsNonDisplaying(CMSampleBufferRef sample)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
    if (!attachments)
        return false;

    for (CFIndex i = 0; i < CFArrayGetCount(attachments); ++i) {
        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_DoNotDisplay))
            return true;
    }

    return false;
}

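// Maps the attachment checks above onto the platform-independent
// MediaSample::SampleFlags bit set.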
MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
{
    int returnValue = MediaSample::None;

    if (CMSampleBufferIsRandomAccess(m_sample.get()))
        returnValue |= MediaSample::IsSync;

    if (CMSampleBufferIsNonDisplaying(m_sample.get()))
        returnValue |= MediaSample::IsNonDisplaying;

    return SampleFlags(returnValue);
}

FloatSize MediaSampleAVFObjC::presentationSize() const
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(m_sample.get());
    if (CMFormatDescriptionGetMediaType(formatDescription) != kCMMediaType_Video)
        return FloatSize();

    return FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
}

void MediaSampleAVFObjC::dump(PrintStream& out) const
{
    out.print("{PTS(", presentationTime(), "), OPTS(", outputPresentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize().width(), "x", presentationSize().height(), ")}");
}

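// Shifts the presentation and decode timestamps of every sample by the given
// offset, then swaps m_sample for a retimed copy made with
// CMSampleBufferCreateCopyWithNewTiming(); returns silently if the timing
// array cannot be fetched or the copy fails.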
void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = toCMTime(toMediaTime(timing.presentationTimeStamp) + offset);
        timing.decodeTimeStamp = toCMTime(toMediaTime(timing.decodeTimeStamp) + offset);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

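// Overwrites the timing of every sample in the buffer with the same
// presentation and decode timestamps, using the same retimed-copy approach as
// offsetTimestampsBy().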
void MediaSampleAVFObjC::setTimestamps(const WTF::MediaTime& presentationTimestamp, const WTF::MediaTime& decodeTimestamp)
{
    CMItemCount itemCount = 0;
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
        return;

    Vector<CMSampleTimingInfo> timingInfoArray;
    timingInfoArray.grow(itemCount);
    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
        return;

    for (auto& timing : timingInfoArray) {
        timing.presentationTimeStamp = toCMTime(presentationTimestamp);
        timing.decodeTimeStamp = toCMTime(decodeTimestamp);
    }

    CMSampleBufferRef newSample;
    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
        return;

    m_sample = adoptCF(newSample);
}

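// A sample can only be divided if it holds more than one media sample and
// CoreMedia reports per-sample sizes for it.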
bool MediaSampleAVFObjC::isDivisable() const
{
    if (CMSampleBufferGetNumSamples(m_sample.get()) == 1)
        return false;

    if (CMSampleBufferGetSampleSizeArray(m_sample.get(), 0, nullptr, nullptr) == kCMSampleBufferError_BufferHasNoSampleSizes)
        return false;

    return true;
}

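// Splits the buffer at the given presentation time: counts the samples whose
// presentation timestamp precedes it, then copies the leading and trailing
// ranges into two new sample buffers that keep this sample's ID (m_id).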
std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> MediaSampleAVFObjC::divide(const MediaTime& presentationTime)
{
    if (!isDivisable())
        return { nullptr, nullptr };

    CFIndex samplesBeforePresentationTime = 0;

    CMSampleBufferCallBlockForEachSample(m_sample.get(), [&] (CMSampleBufferRef sampleBuffer, CMItemCount) -> OSStatus {
        if (toMediaTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) >= presentationTime)
            return 1;
        ++samplesBeforePresentationTime;
        return noErr;
    });

    if (!samplesBeforePresentationTime)
        return { nullptr, this };

    CMItemCount sampleCount = CMSampleBufferGetNumSamples(m_sample.get());
    if (samplesBeforePresentationTime >= sampleCount)
        return { this, nullptr };

    CMSampleBufferRef rawSampleBefore = nullptr;
    CFRange rangeBefore = CFRangeMake(0, samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeBefore, &rawSampleBefore) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleBefore = adoptCF(rawSampleBefore);

    CMSampleBufferRef rawSampleAfter = nullptr;
    CFRange rangeAfter = CFRangeMake(samplesBeforePresentationTime, sampleCount - samplesBeforePresentationTime);
    if (CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, m_sample.get(), rangeAfter, &rawSampleAfter) != noErr)
        return { nullptr, nullptr };
    RetainPtr<CMSampleBufferRef> sampleAfter = adoptCF(rawSampleAfter);

    return { MediaSampleAVFObjC::create(sampleBefore.get(), m_id), MediaSampleAVFObjC::create(sampleAfter.get(), m_id) };
}

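// Copies the sample buffer and sets kCMSampleAttachmentKey_DoNotDisplay on
// every sample in the copy, so the copy can be decoded without being displayed.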
Ref<MediaSample> MediaSampleAVFObjC::createNonDisplayingCopy() const
{
    CMSampleBufferRef newSampleBuffer = 0;
    CMSampleBufferCreateCopy(kCFAllocatorDefault, m_sample.get(), &newSampleBuffer);
    ASSERT(newSampleBuffer);

    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
    }

    return MediaSampleAVFObjC::create(adoptCF(newSampleBuffer).get(), m_id);
}

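// Converts the sample's image buffer to non-planar 32RGBA with
// PixelBufferConformerCV and copies the pixels into a Uint8ClampedArray; the
// byte length assumes rows are tightly packed (width * 4 bytes per row).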
RefPtr<JSC::Uint8ClampedArray> MediaSampleAVFObjC::getRGBAImageData() const
{
    const OSType imageFormat = kCVPixelFormatType_32RGBA;
    RetainPtr<CFNumberRef> imageFormatNumber = adoptCF(CFNumberCreate(nullptr, kCFNumberIntType, &imageFormat));

    RetainPtr<CFMutableDictionaryRef> conformerOptions = adoptCF(CFDictionaryCreateMutable(0, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    CFDictionarySetValue(conformerOptions.get(), kCVPixelBufferPixelFormatTypeKey, imageFormatNumber.get());
    PixelBufferConformerCV pixelBufferConformer(conformerOptions.get());

    auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(m_sample.get()));
    auto rgbaPixelBuffer = pixelBufferConformer.convert(pixelBuffer);
    auto status = CVPixelBufferLockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    // The conformed buffer is non-planar 32RGBA, so read its base address directly;
    // the per-plane accessor is only meaningful for planar pixel formats.
    void* data = CVPixelBufferGetBaseAddress(rgbaPixelBuffer.get());
    size_t byteLength = CVPixelBufferGetHeight(pixelBuffer) * CVPixelBufferGetWidth(pixelBuffer) * 4;
    auto result = JSC::Uint8ClampedArray::create(JSC::ArrayBuffer::create(data, byteLength), 0, byteLength);

    status = CVPixelBufferUnlockBaseAddress(rgbaPixelBuffer.get(), kCVPixelBufferLock_ReadOnly);
    ASSERT(status == noErr);

    return result;
}

}