Remove "virtual" from all lines that have both "virtual" and "override".
diff --git a/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm b/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm
index f799db2..bb700f3 100644
@@ -29,6 +29,8 @@
 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
 
 #import "BlockExceptions.h"
+#import "CDMSessionAVContentKeySession.h"
+#import "CDMSessionMediaSourceAVFObjC.h"
 #import "ExceptionCodePlaceholder.h"
 #import "Logging.h"
 #import "MediaDescription.h"
@@ -44,7 +46,6 @@
 #import "VideoTrackPrivateMediaSourceAVFObjC.h"
 #import "InbandTextTrackPrivateAVFObjC.h"
 #import <AVFoundation/AVAssetTrack.h>
-#import <CoreMedia/CMSampleBuffer.h>
 #import <QuartzCore/CALayer.h>
 #import <objc/runtime.h>
 #import <wtf/text/AtomicString.h>
 #import <wtf/WeakPtr.h>
 #import <map>
 
-#pragma mark -
-#pragma mark Soft Linking
+#pragma mark - Soft Linking
+
+#import "CoreMediaSoftLink.h"
 
 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
-SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
 
 SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
 SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
@@ -69,50 +70,17 @@ SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
 
-SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
-SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
-SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
-
 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
 SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
 
-SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
-SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
-SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
-SOFT_LINK(CoreMedia, CMSampleBufferCreateCopyWithNewTiming, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef originalSBuf, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMSampleBufferRef *sBufCopyOut), (allocator, originalSBuf, numSampleTimingEntries, sampleTimingArray, sBufCopyOut))
-SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
-SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
-SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
-SOFT_LINK(CoreMedia, CMSampleBufferGetFormatDescription, CMFormatDescriptionRef, (CMSampleBufferRef sbuf), (sbuf))
-SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
-SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
-SOFT_LINK(CoreMedia,  CMSampleBufferGetSampleTimingInfoArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount timingArrayEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut), (sbuf, timingArrayEntries, timingArrayOut, timingArrayEntriesNeededOut))
-SOFT_LINK(CoreMedia, CMSampleBufferGetTotalSampleSize, size_t, (CMSampleBufferRef sbuf), (sbuf))
-SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
-SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
-SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize, (CMVideoFormatDescriptionRef videoDesc, Boolean usePixelAspectRatio, Boolean useCleanAperture), (videoDesc, usePixelAspectRatio, useCleanAperture))
-
 #define AVMediaTypeVideo getAVMediaTypeVideo()
 #define AVMediaTypeAudio getAVMediaTypeAudio()
 #define AVMediaTypeText getAVMediaTypeText()
 #define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
 #define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
-#define kCMTimeZero getkCMTimeZero()
-#define kCMTimeInvalid getkCMTimeInvalid()
-#define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
-#define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
-#define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
-#define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
-#define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
-#define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
 
 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
@@ -149,6 +117,7 @@ SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize,
 - (NSError*)error;
 - (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
 - (void)flush;
+- (void)flushAndRemoveImage;
 - (BOOL)isReadyForMoreMediaData;
 - (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
 - (void)stopRequestingMediaData;
@@ -287,11 +256,6 @@ SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize,
 #endif
     ASSERT(streamDataParser == _parser);
 
-    if (isMainThread()) {
-        _parent->willProvideContentKeyRequestInitializationDataForTrackID(trackID);
-        return;
-    }
-
     // We must call synchronously to the main thread, as the AVStreamSession must be associated
     // with the streamDataParser before the delegate method returns.
     RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
@@ -310,10 +274,12 @@ SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize,
     RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
 
     RetainPtr<NSData> strongData = initData;
-    callOnMainThread([strongSelf, strongData, trackID] {
+    OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore = adoptOSObject(dispatch_semaphore_create(0));
+    callOnMainThread([strongSelf, strongData, trackID, hasSessionSemaphore] {
         if (strongSelf->_parent)
-            strongSelf->_parent->didProvideContentKeyRequestInitializationDataForTrackID(strongData.get(), trackID);
+            strongSelf->_parent->didProvideContentKeyRequestInitializationDataForTrackID(strongData.get(), trackID, hasSessionSemaphore);
     });
+    dispatch_semaphore_wait(hasSessionSemaphore.get(), DISPATCH_TIME_FOREVER);
 }
 @end
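
The added semaphore turns this delegate callback into a synchronous handoff: the parser's background thread must not continue until the main thread has had a chance to attach a CDM session to the parser. A minimal sketch of the pattern, with the session-attachment step elided:

    // Worker thread creates a zero-count semaphore, hands it to the main
    // thread, then parks until the main-thread work has finished.
    OSObjectPtr<dispatch_semaphore_t> semaphore = adoptOSObject(dispatch_semaphore_create(0));
    callOnMainThread([semaphore] {
        // ... associate the stream data parser with a CDM session ...
        dispatch_semaphore_signal(semaphore.get());
    });
    dispatch_semaphore_wait(semaphore.get(), DISPATCH_TIME_FOREVER);

Note the deferred variant used in this patch: when no session exists yet, the signal happens later, so whoever ends up holding the stashed semaphore (setCDMSession(), or the SourceBufferPrivateAVFObjC destructor) must signal it, or the parser thread deadlocks.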
 
@@ -452,12 +418,12 @@ SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize,
 - (void)layerFailedToDecode:(NSNotification*)note
 {
     RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
-    ASSERT(_layers.contains(layer.get()));
-
     RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];
 
     RetainPtr<WebAVSampleBufferErrorListener> strongSelf = self;
     callOnMainThread([strongSelf, layer, error] {
+        if (!strongSelf->_parent || !strongSelf->_layers.contains(layer.get()))
+            return;
         strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
     });
 }
@@ -480,17 +446,18 @@ private:
     {
     }
 
-    virtual MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
-    virtual MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
-    virtual MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
-    virtual AtomicString trackID() const override { return m_id; }
-    virtual size_t sizeInBytes() const override { return CMSampleBufferGetTotalSampleSize(m_sample.get()); }
-    virtual FloatSize presentationSize() const override;
+    MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
+    MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
+    MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
+    AtomicString trackID() const override { return m_id; }
+    size_t sizeInBytes() const override { return CMSampleBufferGetTotalSampleSize(m_sample.get()); }
+    FloatSize presentationSize() const override;
 
-    virtual SampleFlags flags() const override;
-    virtual PlatformSample platformSample() override;
-    virtual void dump(PrintStream&) const override;
-    virtual void offsetTimestampsBy(const MediaTime&) override;
+    SampleFlags flags() const override;
+    PlatformSample platformSample() override;
+    void dump(PrintStream&) const override;
+    void offsetTimestampsBy(const MediaTime&) override;
+    void setTimestamps(const MediaTime&, const MediaTime&) override;
 
     RetainPtr<CMSampleBufferRef> m_sample;
     AtomicString m_id;
@@ -537,7 +504,7 @@ FloatSize MediaSampleAVFObjC::presentationSize() const
 
 void MediaSampleAVFObjC::dump(PrintStream& out) const
 {
-    out.print("{PTS(", presentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize(), ")}");
+    out.print("{PTS(", presentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize().width(), "x", presentationSize().height(), ")}");
 }
 
 void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
@@ -563,6 +530,29 @@ void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime& offset)
     m_sample = adoptCF(newSample);
 }
 
+void MediaSampleAVFObjC::setTimestamps(const WTF::MediaTime& presentationTimestamp, const WTF::MediaTime& decodeTimestamp)
+{
+    CMItemCount itemCount = 0;
+    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), 0, nullptr, &itemCount))
+        return;
+
+    Vector<CMSampleTimingInfo> timingInfoArray;
+    timingInfoArray.grow(itemCount);
+    if (noErr != CMSampleBufferGetSampleTimingInfoArray(m_sample.get(), itemCount, timingInfoArray.data(), nullptr))
+        return;
+
+    for (auto& timing : timingInfoArray) {
+        timing.presentationTimeStamp = toCMTime(presentationTimestamp);
+        timing.decodeTimeStamp = toCMTime(decodeTimestamp);
+    }
+
+    CMSampleBufferRef newSample;
+    if (noErr != CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, m_sample.get(), itemCount, timingInfoArray.data(), &newSample))
+        return;
+
+    m_sample = adoptCF(newSample);
+}
+
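
CMSampleBufferRef timing is immutable, so setTimestamps() rewrites every CMSampleTimingInfo entry and swaps in a retimed copy produced by CMSampleBufferCreateCopyWithNewTiming. Usage sketch (illustrative values; the create() signature is an assumption, not from this patch):

    RefPtr<MediaSampleAVFObjC> sample = MediaSampleAVFObjC::create(buffer, trackID);
    // Snap both the presentation and decode stamps to 3.0s (3/1 in
    // MediaTime's value/timescale form); m_sample now points at the copy.
    sample->setTimestamps(MediaTime(3, 1), MediaTime(3, 1));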
 #pragma mark -
 #pragma mark MediaDescriptionAVFObjC
 
@@ -571,10 +561,10 @@ public:
     static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
     virtual ~MediaDescriptionAVFObjC() { }
 
-    virtual AtomicString codec() const override { return m_codec; }
-    virtual bool isVideo() const override { return m_isVideo; }
-    virtual bool isAudio() const override { return m_isAudio; }
-    virtual bool isText() const override { return m_isText; }
+    AtomicString codec() const override { return m_codec; }
+    bool isVideo() const override { return m_isVideo; }
+    bool isAudio() const override { return m_isAudio; }
+    bool isText() const override { return m_isText; }
     
 protected:
     MediaDescriptionAVFObjC(AVAssetTrack* track)
@@ -622,6 +612,9 @@ SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
     ASSERT(!m_client);
     destroyParser();
     destroyRenderers();
+
+    if (m_hasSessionSemaphore)
+        dispatch_semaphore_signal(m_hasSessionSemaphore.get());
 }
 
 void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
@@ -727,15 +720,19 @@ void SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataF
     LOG(MediaSource, "SourceBufferPrivateAVFObjC::willProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
     m_protectedTrackID = trackID;
 
-    BEGIN_BLOCK_OBJC_EXCEPTIONS;
-    [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
-    END_BLOCK_OBJC_EXCEPTIONS;
+    if (CDMSessionMediaSourceAVFObjC* session = m_mediaSource->player()->cdmSession())
+        session->addParser(m_parser.get());
+    else if (!CDMSessionAVContentKeySession::isAvailable()) {
+        BEGIN_BLOCK_OBJC_EXCEPTIONS;
+        [m_mediaSource->player()->streamSession() addStreamDataParser:m_parser.get()];
+        END_BLOCK_OBJC_EXCEPTIONS;
+    }
 #else
     UNUSED_PARAM(trackID);
 #endif
 }
 
-void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID)
+void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID, OSObjectPtr<dispatch_semaphore_t> hasSessionSemaphore)
 {
     if (!m_mediaSource)
         return;
@@ -747,6 +744,14 @@ void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataFo
     RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
     [initData getBytes:initDataArray->data() length:initDataArray->length()];
     m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
+    if (auto session = m_mediaSource->player()->cdmSession()) {
+        session->addParser(m_parser.get());
+        dispatch_semaphore_signal(hasSessionSemaphore.get());
+    } else {
+        if (m_hasSessionSemaphore)
+            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
+        m_hasSessionSemaphore = hasSessionSemaphore;
+    }
 #else
     UNUSED_PARAM(initData);
 #endif
@@ -829,13 +834,12 @@ void SourceBufferPrivateAVFObjC::destroyRenderers()
         m_displayLayer = nullptr;
     }
 
-    for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
-        AVSampleBufferAudioRenderer* renderer = it->second.get();
+    for (auto& renderer : m_audioRenderers.values()) {
         if (m_mediaSource)
-            m_mediaSource->player()->removeAudioRenderer(renderer);
+            m_mediaSource->player()->removeAudioRenderer(renderer.get());
         [renderer flush];
         [renderer stopRequestingMediaData];
-        [m_errorListener stopObservingRenderer:renderer];
+        [m_errorListener stopObservingRenderer:renderer.get()];
     }
 
     m_audioRenderers.clear();
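
The loop rewrite above is part of a file-wide migration of m_audioRenderers from std::map to WTF::HashMap, visible throughout the hunks below. The semantic difference worth noting (sketch, assuming standard WTF::HashMap behavior):

    HashMap<int, RetainPtr<AVSampleBufferAudioRenderer>> renderers;

    // std::map::operator[] default-constructs an entry for a missing key;
    // HashMap makes lookups and insertions explicit instead:
    if (renderers.contains(trackID))
        [renderers.get(trackID) flush];   // get() returns a null RetainPtr when absent
    renderers.set(trackID, adoptNS([allocAVSampleBufferAudioRendererInstance() init]));

    // values() allows iterating without unpacking key/value pairs:
    for (auto& renderer : renderers.values())
        [renderer stopRequestingMediaData];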
@@ -908,28 +912,56 @@ void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSou
     int trackID = track->trackID();
 
     if (!track->enabled()) {
-        AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
+        RetainPtr<AVSampleBufferAudioRenderer> renderer = m_audioRenderers.get(trackID);
         [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
         if (m_mediaSource)
-            m_mediaSource->player()->removeAudioRenderer(renderer);
+            m_mediaSource->player()->removeAudioRenderer(renderer.get());
     } else {
         [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
         RetainPtr<AVSampleBufferAudioRenderer> renderer;
-        if (!m_audioRenderers.count(trackID)) {
+        if (!m_audioRenderers.contains(trackID)) {
             renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
             [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
                 didBecomeReadyForMoreSamples(trackID);
             }];
-            m_audioRenderers[trackID] = renderer;
+            m_audioRenderers.set(trackID, renderer);
             [m_errorListener beginObservingRenderer:renderer.get()];
         } else
-            renderer = m_audioRenderers[trackID].get();
+            renderer = m_audioRenderers.get(trackID);
 
         if (m_mediaSource)
             m_mediaSource->player()->addAudioRenderer(renderer.get());
     }
 }
 
+void SourceBufferPrivateAVFObjC::setCDMSession(CDMSessionMediaSourceAVFObjC* session)
+{
+    if (session == m_session)
+        return;
+
+    if (m_session)
+        m_session->removeSourceBuffer(this);
+
+    m_session = session;
+
+    if (m_session) {
+        m_session->addSourceBuffer(this);
+        if (m_hasSessionSemaphore) {
+            dispatch_semaphore_signal(m_hasSessionSemaphore.get());
+            m_hasSessionSemaphore = nullptr;
+        }
+    }
+}
+
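
Taken together with didProvideContentKeyRequestInitializationDataForTrackID() above, the semaphore has exactly three possible signalers. A summary of the flow, in comments only, reconstructed from the hunks in this patch:

    // Parser (background) thread:        Main thread:
    //   didProvideContentKeyRequest...     callOnMainThread block runs:
    //     dispatch_semaphore_wait()          - session present: addParser() + signal
    //     blocks here                        - no session yet:  stash semaphore in
    //                                          m_hasSessionSemaphore
    //                                      Later, setCDMSession() signals the stash;
    //                                      failing that, ~SourceBufferPrivateAVFObjC()
    //                                      signals it so the parser thread is never
    //                                      left blocked.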
+void SourceBufferPrivateAVFObjC::flush()
+{
+    if (m_displayLayer)
+        [m_displayLayer flushAndRemoveImage];
+
+    for (auto& renderer : m_audioRenderers.values())
+        [renderer flush];
+}
+
 void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
 {
     ASSERT(!m_errorClients.contains(client));
@@ -945,8 +977,16 @@ void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPri
 void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
 {
     LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);
-    for (auto& client : m_errorClients)
-        client->layerDidReceiveError(layer, error);
+
+    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
+    bool anyIgnored = false;
+    for (auto& client : m_errorClients) {
+        bool shouldIgnore = false;
+        client->layerDidReceiveError(layer, error, shouldIgnore);
+        anyIgnored |= shouldIgnore;
+    }
+    if (anyIgnored)
+        return;
 
     int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];
 
@@ -957,8 +997,16 @@ void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer
 void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
 {
     LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);
-    for (auto& client : m_errorClients)
-        client->rendererDidReceiveError(renderer, error);
+
+    // FIXME(142246): Remove the following once <rdar://problem/20027434> is resolved.
+    bool anyIgnored = false;
+    for (auto& client : m_errorClients) {
+        bool shouldIgnore = false;
+        client->rendererDidReceiveError(renderer, error, shouldIgnore);
+        anyIgnored |= shouldIgnore;
+    }
+    if (anyIgnored)
+        return;
 }
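
Both error paths now use the same veto pattern: the client callback gained a bool& out-parameter, and if any registered client sets it, default handling is skipped. A client honoring the FIXME-referenced workaround might look like this (sketch only; isKnownTransientError() is a hypothetical predicate):

    class TransientErrorClient final : public SourceBufferPrivateAVFObjCErrorClient {
        void layerDidReceiveError(AVSampleBufferDisplayLayer*, NSError* error, bool& shouldIgnore) override
        {
            // Setting shouldIgnore suppresses the buffer's default handling.
            shouldIgnore = isKnownTransientError(error);
        }
        void rendererDidReceiveError(AVSampleBufferAudioRenderer*, NSError* error, bool& shouldIgnore) override
        {
            shouldIgnore = isKnownTransientError(error);
        }
    };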
 
 static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
@@ -984,8 +1032,8 @@ void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefP
 
     if (trackID == m_enabledVideoTrackID)
         flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
-    else if (m_audioRenderers.count(trackID))
-        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
+    else if (m_audioRenderers.contains(trackID))
+        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers.get(trackID).get());
 }
 
 void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
@@ -1028,7 +1076,7 @@ void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefP
 void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
 {
     int trackID = trackIDString.toInt();
-    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
+    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.contains(trackID))
         return;
 
     RefPtr<MediaSample> mediaSample = prpMediaSample;
@@ -1044,7 +1092,7 @@ void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaS
         if (m_mediaSource)
             m_mediaSource->player()->setHasAvailableVideoFrame(true);
     } else
-        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
+        [m_audioRenderers.get(trackID) enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
 }
 
 bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
@@ -1052,8 +1100,8 @@ bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDStrin
     int trackID = trackIDString.toInt();
     if (trackID == m_enabledVideoTrackID)
         return [m_displayLayer isReadyForMoreMediaData];
-    else if (m_audioRenderers.count(trackID))
-        return [m_audioRenderers[trackID] isReadyForMoreMediaData];
+    else if (m_audioRenderers.contains(trackID))
+        return [m_audioRenderers.get(trackID) isReadyForMoreMediaData];
     else
         ASSERT_NOT_REACHED();
 
@@ -1088,8 +1136,8 @@ void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
 {
     if (trackID == m_enabledVideoTrackID)
         [m_displayLayer stopRequestingMediaData];
-    else if (m_audioRenderers.count(trackID))
-        [m_audioRenderers[trackID] stopRequestingMediaData];
+    else if (m_audioRenderers.contains(trackID))
+        [m_audioRenderers.get(trackID) stopRequestingMediaData];
     else {
         ASSERT_NOT_REACHED();
         return;
@@ -1106,8 +1154,8 @@ void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicStrin
         [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
             didBecomeReadyForMoreSamples(trackID);
         }];
-    } else if (m_audioRenderers.count(trackID)) {
-        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+    } else if (m_audioRenderers.contains(trackID)) {
+        [m_audioRenderers.get(trackID) requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
             didBecomeReadyForMoreSamples(trackID);
         }];
     } else