Refactoring: make MediaTime the primary time type for audiovisual times.
author jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 5 Sep 2014 18:38:59 +0000 (18:38 +0000)
committer jer.noble@apple.com <jer.noble@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Fri, 5 Sep 2014 18:38:59 +0000 (18:38 +0000)
https://bugs.webkit.org/show_bug.cgi?id=133579

Reviewed by Eric Carlson.

Source/JavaScriptCore:

Add a utility function which converts a MediaTime to a JSNumber.

* runtime/JSCJSValue.h:
(JSC::jsNumber):

Source/WebCore:

Make MediaTime the primary time type in order to limit the number of floating-point
rounding errors for media systems which can make use of rational time objects.

Add some convenience methods to convert between QTTime and MediaTime.
* platform/graphics/mac/MediaTimeQTKit.h: Added.
* platform/graphics/mac/MediaTimeQTKit.mm: Added.
(WebCore::toMediaTime):
(WebCore::toQTTime):

Rename MediaTimeMac -> MediaTimeAVFoundation:
* platform/graphics/avfoundation/MediaTimeAVFoundation.cpp: Renamed from Source/WebCore/platform/mac/MediaTimeMac.cpp.
(WebCore::toMediaTime):
(WebCore::toCMTime):
* platform/graphics/avfoundation/MediaTimeAVFoundation.h: Renamed from Source/WebCore/platform/mac/MediaTimeMac.h.

Use MediaTime instead of double:
* Modules/mediasource/MediaSource.cpp:
(WebCore::MediaSource::duration):
(WebCore::MediaSource::currentTime):
(WebCore::MediaSource::buffered):
(WebCore::MediaSource::setDuration):
(WebCore::MediaSource::activeRanges):
* Modules/mediasource/MediaSource.h:
* Modules/mediasource/SourceBuffer.cpp:
(WebCore::SourceBuffer::remove):
(WebCore::SourceBuffer::removeCodedFrames):
(WebCore::SourceBuffer::sourceBufferPrivateDidReceiveInitializationSegment):
(WebCore::SourceBuffer::sourceBufferPrivateDidReceiveSample):
(WebCore::SourceBuffer::hasCurrentTime):
(WebCore::SourceBuffer::hasFutureTime):
(WebCore::SourceBuffer::canPlayThrough):
* WebCore.xcodeproj/project.pbxproj:
* bindings/js/JSDataCueCustom.cpp:
(WebCore::JSDataCueConstructor::constructJSDataCue):
* html/HTMLMediaElement.cpp:
(WebCore::HTMLMediaElement::HTMLMediaElement):
(WebCore::HTMLMediaElement::parseAttribute):
* html/HTMLMediaElement.h:
(WebCore::ValueToString<MediaTime>::string):
* html/MediaFragmentURIParser.cpp:
(WebCore::MediaFragmentURIParser::MediaFragmentURIParser):
(WebCore::MediaFragmentURIParser::startTime):
(WebCore::MediaFragmentURIParser::endTime):
(WebCore::MediaFragmentURIParser::parseTimeFragment):
(WebCore::MediaFragmentURIParser::parseNPTFragment):
(WebCore::MediaFragmentURIParser::parseNPTTime):
(WebCore::MediaFragmentURIParser::invalidTimeValue): Deleted.
* html/MediaFragmentURIParser.h:
* html/TimeRanges.h:
(WebCore::TimeRanges::ranges):
* html/track/DataCue.cpp:
(WebCore::DataCue::DataCue):
* html/track/DataCue.h:
(WebCore::DataCue::create):
* html/track/InbandDataTextTrack.cpp:
(WebCore::InbandDataTextTrack::addDataCue):
(WebCore::InbandDataTextTrack::updateDataCue):
(WebCore::InbandDataTextTrack::removeDataCue):
* html/track/InbandDataTextTrack.h:
* html/track/InbandGenericTextTrack.cpp:
(WebCore::InbandGenericTextTrack::updateCueFromCueData):
(WebCore::InbandGenericTextTrack::addGenericCue):
(WebCore::InbandGenericTextTrack::removeGenericCue):
* html/track/InbandTextTrack.cpp:
(WebCore::InbandTextTrack::startTimeVariance):
* html/track/InbandTextTrack.h:
* html/track/InbandWebVTTTextTrack.cpp:
(WebCore::InbandWebVTTTextTrack::newCuesParsed):
* html/track/TextTrack.cpp:
(WebCore::TextTrack::addCue):
(WebCore::TextTrack::hasCue):
* html/track/TextTrack.h:
(WebCore::TextTrack::startTimeVariance):
* html/track/TextTrackCue.cpp:
(WebCore::TextTrackCue::create):
(WebCore::TextTrackCue::TextTrackCue):
(WebCore::TextTrackCue::setStartTime):
(WebCore::TextTrackCue::setEndTime):
(WebCore::TextTrackCue::hasEquivalentStartTime):
* html/track/TextTrackCue.h:
(WebCore::TextTrackCue::startTime):
(WebCore::TextTrackCue::endTime):
* html/track/TextTrackCueGeneric.cpp:
(WebCore::TextTrackCueGeneric::TextTrackCueGeneric):
* html/track/TextTrackCueGeneric.h:
* html/track/TextTrackCueList.cpp:
(WebCore::TextTrackCueList::add):
* html/track/VTTCue.cpp:
(WebCore::VTTCue::VTTCue):
(WebCore::VTTCue::markFutureAndPastNodes):
(WebCore::VTTCue::updateDisplayTree):
* html/track/VTTCue.h:
(WebCore::VTTCue::create):
* html/track/WebVTTParser.cpp:
(WebCore::WebVTTParser::WebVTTParser):
(WebCore::WebVTTParser::resetCueValues):
(WebCore::WebVTTParser::collectTimeStamp):
(WebCore::WebVTTTreeBuilder::constructTreeFromToken):
* html/track/WebVTTParser.h:
(WebCore::WebVTTCueData::startTime):
(WebCore::WebVTTCueData::setStartTime):
(WebCore::WebVTTCueData::endTime):
(WebCore::WebVTTCueData::setEndTime):
(WebCore::WebVTTCueData::WebVTTCueData): Deleted.
* platform/graphics/InbandTextTrackPrivateClient.h:
(WebCore::GenericCueData::startTime):
(WebCore::GenericCueData::setStartTime):
(WebCore::GenericCueData::endTime):
(WebCore::GenericCueData::setEndTime):
(WebCore::GenericCueData::GenericCueData):
* platform/graphics/MediaPlayer.cpp:
(WebCore::MediaPlayer::duration):
(WebCore::MediaPlayer::startTime):
(WebCore::MediaPlayer::initialTime):
(WebCore::MediaPlayer::currentTime):
(WebCore::MediaPlayer::seekWithTolerance):
(WebCore::MediaPlayer::seek):
(WebCore::MediaPlayer::maxTimeSeekable):
(WebCore::MediaPlayer::minTimeSeekable):
(WebCore::MediaPlayer::mediaTimeForTimeValue):
(WebCore::MediaPlayer::totalFrameDelay):
* platform/graphics/MediaPlayer.h:
* platform/graphics/MediaPlayerPrivate.h:
(WebCore::MediaPlayerPrivateInterface::durationMediaTime):
(WebCore::MediaPlayerPrivateInterface::currentMediaTime):
(WebCore::MediaPlayerPrivateInterface::seek):
(WebCore::MediaPlayerPrivateInterface::seekWithTolerance):
(WebCore::MediaPlayerPrivateInterface::startTime):
(WebCore::MediaPlayerPrivateInterface::initialTime):
(WebCore::MediaPlayerPrivateInterface::seekable):
(WebCore::MediaPlayerPrivateInterface::maxMediaTimeSeekable):
(WebCore::MediaPlayerPrivateInterface::minMediaTimeSeekable):
(WebCore::MediaPlayerPrivateInterface::mediaTimeForTimeValue):
(WebCore::MediaPlayerPrivateInterface::totalFrameDelay):
(WebCore::MediaPlayerPrivateInterface::startTimeDouble): Deleted.
(WebCore::MediaPlayerPrivateInterface::maxTimeSeekableDouble): Deleted.
(WebCore::MediaPlayerPrivateInterface::mediaTimeForTimeValueDouble): Deleted.
* platform/graphics/MediaSourcePrivateClient.h:
* platform/graphics/TrackPrivateBase.h:
(WebCore::TrackPrivateBase::startTimeVariance):
* platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.cpp:
(WebCore::InbandMetadataTextTrackPrivateAVF::InbandMetadataTextTrackPrivateAVF):
(WebCore::InbandMetadataTextTrackPrivateAVF::addDataCue):
(WebCore::InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes):
(WebCore::InbandMetadataTextTrackPrivateAVF::flushPartialCues):
* platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.h:
(WebCore::IncompleteMetaDataCue::IncompleteMetaDataCue):
(WebCore::IncompleteMetaDataCue::startTime):
* platform/graphics/avfoundation/InbandTextTrackPrivateAVF.cpp:
(WebCore::InbandTextTrackPrivateAVF::processCue):
(WebCore::InbandTextTrackPrivateAVF::resetCueValues):
* platform/graphics/avfoundation/InbandTextTrackPrivateAVF.h:
* platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.cpp:
(WebCore::MediaPlayerPrivateAVFoundation::MediaPlayerPrivateAVFoundation):
(WebCore::MediaPlayerPrivateAVFoundation::durationMediaTime):
(WebCore::MediaPlayerPrivateAVFoundation::seek):
(WebCore::MediaPlayerPrivateAVFoundation::seekWithTolerance):
(WebCore::MediaPlayerPrivateAVFoundation::maxMediaTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundation::minMediaTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundation::maxTimeLoaded):
(WebCore::MediaPlayerPrivateAVFoundation::didLoadingProgress):
(WebCore::MediaPlayerPrivateAVFoundation::updateStates):
(WebCore::MediaPlayerPrivateAVFoundation::loadedTimeRangesChanged):
(WebCore::MediaPlayerPrivateAVFoundation::seekableTimeRangesChanged):
(WebCore::MediaPlayerPrivateAVFoundation::timeChanged):
(WebCore::MediaPlayerPrivateAVFoundation::didEnd):
(WebCore::MediaPlayerPrivateAVFoundation::invalidateCachedDuration):
(WebCore::MediaPlayerPrivateAVFoundation::scheduleMainThreadNotification):
(WebCore::MediaPlayerPrivateAVFoundation::extraMemoryCost):
(WebCore::MediaPlayerPrivateAVFoundation::duration): Deleted.
(WebCore::MediaPlayerPrivateAVFoundation::maxTimeSeekableDouble): Deleted.
(WebCore::MediaPlayerPrivateAVFoundation::minTimeSeekable): Deleted.
* platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.h:
(WebCore::MediaPlayerPrivateAVFoundation::Notification::Notification):
(WebCore::MediaPlayerPrivateAVFoundation::Notification::time):
* platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp:
(WebCore::MediaPlayerPrivateAVFoundationCF::platformDuration):
(WebCore::MediaPlayerPrivateAVFoundationCF::currentTime):
(WebCore::MediaPlayerPrivateAVFoundationCF::seekToTime):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformDuration):
(WebCore::MediaPlayerPrivateAVFoundationCF::currentTime):
(WebCore::MediaPlayerPrivateAVFoundationCF::seekToTime):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded):
(WebCore::MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue): Deleted.
(WebCore::AVFWrapper::seekToTime): Deleted.
(WebCore::LegibleOutputData::LegibleOutputData): Deleted.
(WebCore::AVFWrapper::createImageForTimeInRect): Deleted.
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded):
(WebCore::MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue): Deleted.
(WebCore::AVFWrapper::seekToTime): Deleted.
(WebCore::LegibleOutputData::LegibleOutputData): Deleted.
(WebCore::AVFWrapper::createImageForTimeInRect): Deleted.
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
(WebCore::MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC):
(WebCore::MediaPlayerPrivateAVFoundationObjC::cancelLoad):
(WebCore::MediaPlayerPrivateAVFoundationObjC::platformDuration):
(WebCore::MediaPlayerPrivateAVFoundationObjC::currentMediaTime):
(WebCore::MediaPlayerPrivateAVFoundationObjC::seekToTime):
(WebCore::MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable):
(WebCore::MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded):
(WebCore::MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue):
(WebCore::MediaPlayerPrivateAVFoundationObjC::processCue):
(WebCore::MediaPlayerPrivateAVFoundationObjC::metadataDidArrive):
(WebCore::MediaPlayerPrivateAVFoundationObjC::durationDidChange):
(-[WebCoreAVFMovieObserver observeValueForKeyPath:ofObject:change:context:]):
(-[WebCoreAVFMovieObserver legibleOutput:didOutputAttributedStrings:nativeSampleBuffers:forItemTime:]):
(WebCore::MediaPlayerPrivateAVFoundationObjC::currentTime): Deleted.
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::startTime):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::initialTime):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekInternal):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekable):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay):
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::durationDouble): Deleted.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble): Deleted.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble): Deleted.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::maxTimeSeekableDouble): Deleted.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::minTimeSeekable): Deleted.
* platform/graphics/avfoundation/objc/OutOfBandTextTrackPrivateAVF.h:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
* platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h:
(WebCore::InbandMetadataTextTrackPrivateGStreamer::addDataCue):
* platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h:
(WebCore::InbandMetadataTextTrackPrivateGStreamer::addDataCue):
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::processMpegTsSection):
(WebCore::MediaPlayerPrivateGStreamer::processTableOfContentsEntry):
(WebCore::MediaPlayerPrivateGStreamer::processMpegTsSection):
(WebCore::MediaPlayerPrivateGStreamer::processTableOfContentsEntry):
* platform/graphics/mac/MediaPlayerPrivateQTKit.h:
* platform/graphics/mac/MediaPlayerPrivateQTKit.mm:
* platform/graphics/mac/MediaPlayerPrivateQTKit.mm:
(WebCore::maxValueForTimeRanges):
(WebCore::MediaPlayerPrivateQTKit::MediaPlayerPrivateQTKit):
(WebCore::MediaPlayerPrivateQTKit::durationMediaTime):
(WebCore::MediaPlayerPrivateQTKit::currentMediaTime):
(WebCore::MediaPlayerPrivateQTKit::seek):
(WebCore::MediaPlayerPrivateQTKit::doSeek):
(WebCore::MediaPlayerPrivateQTKit::cancelSeek):
(WebCore::MediaPlayerPrivateQTKit::seekTimerFired):
(WebCore::MediaPlayerPrivateQTKit::seeking):
(WebCore::MediaPlayerPrivateQTKit::setPreservesPitch):
(WebCore::MediaPlayerPrivateQTKit::buffered):
(WebCore::MediaPlayerPrivateQTKit::maxMediaTimeSeekable):
(WebCore::MediaPlayerPrivateQTKit::maxMediaTimeLoaded):
(WebCore::MediaPlayerPrivateQTKit::didLoadingProgress):
(WebCore::MediaPlayerPrivateQTKit::updateStates):
(WebCore::MediaPlayerPrivateQTKit::timeChanged):
(WebCore::MediaPlayerPrivateQTKit::didEnd):
(WebCore::MediaPlayerPrivateQTKit::maxMediaTimeSeekable):
(WebCore::MediaPlayerPrivateQTKit::maxMediaTimeLoaded):
(WebCore::MediaPlayerPrivateQTKit::createQTTime): Deleted.
(WebCore::MediaPlayerPrivateQTKit::duration): Deleted.
(WebCore::MediaPlayerPrivateQTKit::currentTime): Deleted.
(WebCore::MediaPlayerPrivateQTKit::maxTimeSeekable): Deleted.
(WebCore::MediaPlayerPrivateQTKit::maxTimeLoaded): Deleted.
(WebCore::MediaPlayerPrivateQTKit::mediaTimeForTimeValue): Deleted.
* platform/mac/PlatformClockCM.mm:
* platform/mock/mediasource/MockMediaPlayerMediaSource.cpp:
(WebCore::MockMediaPlayerMediaSource::maxMediaTimeSeekable):
(WebCore::MockMediaPlayerMediaSource::currentMediaTime):
(WebCore::MockMediaPlayerMediaSource::durationMediaTime):
(WebCore::MockMediaPlayerMediaSource::seekWithTolerance):
(WebCore::MockMediaPlayerMediaSource::totalFrameDelay):
(WebCore::MockMediaPlayerMediaSource::maxTimeSeekableDouble): Deleted.
(WebCore::MockMediaPlayerMediaSource::currentTimeDouble): Deleted.
(WebCore::MockMediaPlayerMediaSource::durationDouble): Deleted.
* platform/mock/mediasource/MockMediaPlayerMediaSource.h:
* platform/mock/mediasource/MockMediaSourcePrivate.cpp:
(WebCore::MockMediaSourcePrivate::MockMediaSourcePrivate):
* platform/mock/mediasource/MockMediaSourcePrivate.h:
* platform/mock/mediasource/MockSourceBufferPrivate.cpp:
(WebCore::MockSourceBufferPrivate::enqueueSample):

Source/WTF:

Add a unary minus operator, and add unimplemented private casting operators, to make
unintentional double->MediaTime and MediaTime->double casts hard errors.

* wtf/MediaTime.cpp:
(WTF::MediaTime::operator-):
* wtf/MediaTime.h:

LayoutTests:

Update the http/media tests to use byte-ranges, and update our byte-range CGI script
to return correct headers. Remove the platform expected results for media/video-seek-past-end-paused.html
now that we pass.

* http/tests/media/reload-after-dialog.html:
* http/tests/media/video-error-abort.html:
* http/tests/media/video-throttled-load.cgi:
* platform/mac/media/video-seek-past-end-paused-expected.txt: Removed.
* platform/mac/TestExpectations:

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@173318 268f45cc-cd09-0410-ab3c-d52691b4dbfc

80 files changed:
LayoutTests/ChangeLog
LayoutTests/http/tests/media/reload-after-dialog.html
LayoutTests/http/tests/media/video-error-abort.html
LayoutTests/http/tests/media/video-throttled-load.cgi
LayoutTests/platform/mac/TestExpectations
LayoutTests/platform/mac/media/video-seek-past-end-paused-expected.txt [deleted file]
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/runtime/JSCJSValue.h
Source/WTF/ChangeLog
Source/WTF/wtf/MediaTime.cpp
Source/WTF/wtf/MediaTime.h
Source/WebCore/ChangeLog
Source/WebCore/Modules/mediasource/MediaSource.cpp
Source/WebCore/Modules/mediasource/MediaSource.h
Source/WebCore/Modules/mediasource/SourceBuffer.cpp
Source/WebCore/Modules/mediasource/SourceBuffer.h
Source/WebCore/WebCore.vcxproj/WebCore.vcxproj
Source/WebCore/WebCore.xcodeproj/project.pbxproj
Source/WebCore/bindings/js/JSDataCueCustom.cpp
Source/WebCore/html/HTMLMediaElement.cpp
Source/WebCore/html/HTMLMediaElement.h
Source/WebCore/html/MediaController.cpp
Source/WebCore/html/MediaFragmentURIParser.cpp
Source/WebCore/html/MediaFragmentURIParser.h
Source/WebCore/html/TimeRanges.h
Source/WebCore/html/track/DataCue.cpp
Source/WebCore/html/track/DataCue.h
Source/WebCore/html/track/InbandDataTextTrack.cpp
Source/WebCore/html/track/InbandDataTextTrack.h
Source/WebCore/html/track/InbandGenericTextTrack.cpp
Source/WebCore/html/track/InbandTextTrack.cpp
Source/WebCore/html/track/InbandTextTrack.h
Source/WebCore/html/track/TextTrack.cpp
Source/WebCore/html/track/TextTrack.h
Source/WebCore/html/track/TextTrackCue.cpp
Source/WebCore/html/track/TextTrackCue.h
Source/WebCore/html/track/TextTrackCueGeneric.cpp
Source/WebCore/html/track/TextTrackCueGeneric.h
Source/WebCore/html/track/TextTrackCueList.cpp
Source/WebCore/html/track/VTTCue.cpp
Source/WebCore/html/track/VTTCue.h
Source/WebCore/html/track/WebVTTParser.cpp
Source/WebCore/html/track/WebVTTParser.h
Source/WebCore/platform/graphics/InbandTextTrackPrivateClient.h
Source/WebCore/platform/graphics/MediaPlayer.cpp
Source/WebCore/platform/graphics/MediaPlayer.h
Source/WebCore/platform/graphics/MediaPlayerPrivate.h
Source/WebCore/platform/graphics/MediaSourcePrivateClient.h
Source/WebCore/platform/graphics/TrackPrivateBase.h
Source/WebCore/platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.cpp
Source/WebCore/platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.h
Source/WebCore/platform/graphics/avfoundation/InbandTextTrackPrivateAVF.cpp
Source/WebCore/platform/graphics/avfoundation/InbandTextTrackPrivateAVF.h
Source/WebCore/platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.cpp
Source/WebCore/platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.h
Source/WebCore/platform/graphics/avfoundation/MediaTimeAVFoundation.cpp [moved from Source/WebCore/platform/mac/MediaTimeMac.cpp with 80% similarity]
Source/WebCore/platform/graphics/avfoundation/MediaTimeAVFoundation.h [moved from Source/WebCore/platform/mac/MediaTimeMac.h with 94% similarity]
Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp
Source/WebCore/platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h
Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
Source/WebCore/platform/graphics/avfoundation/objc/MediaSourcePrivateAVFObjC.mm
Source/WebCore/platform/graphics/avfoundation/objc/OutOfBandTextTrackPrivateAVF.h
Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm
Source/WebCore/platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h
Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
Source/WebCore/platform/graphics/gstreamer/MediaSourceGStreamer.cpp
Source/WebCore/platform/graphics/mac/MediaPlayerPrivateQTKit.h
Source/WebCore/platform/graphics/mac/MediaPlayerPrivateQTKit.mm
Source/WebCore/platform/graphics/mac/MediaTimeQTKit.h [new file with mode: 0644]
Source/WebCore/platform/graphics/mac/MediaTimeQTKit.mm [new file with mode: 0644]
Source/WebCore/platform/mac/PlatformClockCM.mm
Source/WebCore/platform/mock/mediasource/MockMediaPlayerMediaSource.cpp
Source/WebCore/platform/mock/mediasource/MockMediaPlayerMediaSource.h
Source/WebCore/platform/mock/mediasource/MockMediaSourcePrivate.cpp
Source/WebCore/platform/mock/mediasource/MockMediaSourcePrivate.h
Source/WebCore/platform/mock/mediasource/MockSourceBufferPrivate.cpp
Source/WebKit2/WebProcess/WebPage/mac/TiledCoreAnimationDrawingArea.mm

index a108313..1ef2b33 100644 (file)
@@ -1,3 +1,20 @@
+2014-06-10  Jer Noble  <jer.noble@apple.com>
+
+        Refactoring: make MediaTime the primary time type for audiovisual times.
+        https://bugs.webkit.org/show_bug.cgi?id=133579
+
+        Reviewed by Eric Carlson.
+
+        Update the http/media tests to use byte-ranges, and update our byte-range CGI script
+        to return correct headers. Remove the platform expected results for media/video-seek-past-end-paused.html
+        now that we pass.
+
+        * http/tests/media/reload-after-dialog.html:
+        * http/tests/media/video-error-abort.html:
+        * http/tests/media/video-throttled-load.cgi:
+        * platform/mac/media/video-seek-past-end-paused-expected.txt: Removed.
+        * platform/mac/TestExpectations:
+
 2014-09-04  Alexey Proskuryakov  <ap@apple.com>
 
         platform/mac-wk2/tiled-drawing/scrolling/frames/frameset-nested-frame-scrollability.html is flakey
index de6f3ca..fc0b506 100644 (file)
@@ -46,7 +46,7 @@
 
                 findMediaElement();
                 var movie = findMediaFile("video", "../resources/test");
-                video.src = "http://127.0.0.1:8000/media/video-throttled-load.cgi?name=" + movie + "&throttle=100";
+                video.src = "http://127.0.0.1:8000/media/video-throttled-load.cgi?name=" + movie + "&throttle=100&nph=1";
             }
 
         </script>
index abf1a37..57d2ae0 100644 (file)
@@ -50,7 +50,7 @@
                 testExpected("video.error", null);
 
                 var movie = findMediaFile("video", "../resources/test");
-                video.src = "http://127.0.0.1:8000/media/video-throttled-load.cgi?name=" + movie + "&throttle=256";
+                video.src = "http://127.0.0.1:8000/media/video-throttled-load.cgi?name=" + movie + "&throttle=256&nph=1";
             }
         </script>
     </head>
index 7973953..86e6ee1 100755 (executable)
@@ -30,6 +30,7 @@ my @parsedRange = (0, $rangeEnd);
 if ($nph) {
     # Handle HTTP Range requests.
     my $httpContentRange;
+    my $httpContentLength;
     my $httpStatus;
 
     if ($contentRange) {
@@ -42,13 +43,15 @@ if ($nph) {
         }
         $httpStatus = "206 Partial Content";
         $httpContentRange = "bytes " . $parsedRange[0] . "-" . $parsedRange[1] . "/" . $filesize;
+        $httpContentLength = $parsedRange[1] - $parsedRange[0] + 1;
     } else {
         $httpStatus = "200 OK";
+        $httpContentLength = $filesize;
     }
 
     print "Status: " . $httpStatus . "\n";
     print "Connection: close\n";
-    print "Content-Length: " . $filesize . "\n";
+    print "Content-Length: " . $httpContentLength . "\n";
     print "Content-Type: " . $type . "\n";
     print "Accept-Ranges: bytes\n";
     if ($httpContentRange) {
@@ -67,10 +70,12 @@ open FILE, $name or die;
 binmode FILE;
 my ($data, $n);
 my $total = $parsedRange[0];
+my $length = $parsedRange[1] - $parsedRange[0];
+my $chunkLength = $length < 1024 ? $length : 1024;
 
 seek(FILE, $parsedRange[0], 0);
 
-while (($n = read FILE, $data, 1024) != 0) {
+while (($n = read FILE, $data, $chunkLength) != 0) {
     print $data;
 
     $total += $n;
index 6293970..0d9cbec 100755 (executable)
@@ -1401,3 +1401,6 @@ webkit.org/b/135133 [ MountainLion ] fast/layers/no-clipping-overflow-hidden-add
 
 webkit.org/b/135160 media/track/track-in-band-subtitles-too-large.html [ Pass Failure ]
 webkit.org/b/135160 media/track/track-long-word-container-sizing.html [ Pass Failure ]
+
+webkit.org/b/136532 [ MountainLion ] media/audio-data-url.html [ Failure ]
+webkit.org/b/136532 [ MountainLion ] media/sources-fallback-codecs.html [ Failure ]
diff --git a/LayoutTests/platform/mac/media/video-seek-past-end-paused-expected.txt b/LayoutTests/platform/mac/media/video-seek-past-end-paused-expected.txt
deleted file mode 100644 (file)
index 5637bda..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-
-Test that seeking a paused video past its end sets currentTime to duration and leaves the video paused.
-
-EVENT(canplaythrough)
-EXPECTED (video.paused == 'true') OK
-EXPECTED (video.ended == 'false') OK
-RUN(video.play())
-
-EXPECTED (video.paused == 'false') OK
-EXPECTED (mediaElement.currentTime > '0') OK
-
-EXPECTED (video.paused == 'true') OK
-
-EXPECTED (video.paused == 'true') OK
-EXPECTED (mediaElement.currentTime == 'mediaElement.duration'), OBSERVED '6.026666666666666' FAIL
-EXPECTED (video.ended == 'true'), OBSERVED 'false' FAIL
-
-END OF TEST
-
index 906318b..1d086ab 100644 (file)
@@ -1,3 +1,15 @@
+2014-06-06  Jer Noble  <jer.noble@apple.com>
+
+        Refactoring: make MediaTime the primary time type for audiovisual times.
+        https://bugs.webkit.org/show_bug.cgi?id=133579
+
+        Reviewed by Eric Carlson.
+
+        Add a utility function which converts a MediaTime to a JSNumber.
+
+        * runtime/JSCJSValue.h:
+        (JSC::jsNumber):
+
 2014-09-04  Michael Saboff  <msaboff@apple.com>
 
         ARM: Add more coverage to ARMv7 disassembler
index 620418a..3bbfae1 100644 (file)
@@ -32,6 +32,7 @@
 #include <wtf/HashMap.h>
 #include <wtf/HashTraits.h>
 #include <wtf/MathExtras.h>
+#include <wtf/MediaTime.h>
 #include <wtf/StdLibExtras.h>
 #include <wtf/TriState.h>
 
@@ -472,6 +473,11 @@ ALWAYS_INLINE JSValue jsNumber(double d)
     return JSValue(d);
 }
 
+ALWAYS_INLINE JSValue jsNumber(MediaTime t)
+{
+    return jsNumber(t.toDouble());
+}
+
 ALWAYS_INLINE JSValue jsNumber(char i)
 {
     return JSValue(i);
index fee4c78..da4b124 100644 (file)
@@ -1,3 +1,17 @@
+2014-06-06  Jer Noble  <jer.noble@apple.com>
+
+        Refactoring: make MediaTime the primary time type for audiovisual times.
+        https://bugs.webkit.org/show_bug.cgi?id=133579
+
+        Reviewed by Eric Carlson.
+
+        Add a unary minus operator, and add unimplemented private casting operators, to make
+        unintentional double->MediaTime and MediaTime->double casts hard errors.
+
+        * wtf/MediaTime.cpp:
+        (WTF::MediaTime::operator-):
+        * wtf/MediaTime.h:
+
 2014-09-04  Geoffrey Garen  <ggaren@apple.com>
 
         Do the bmalloc.
index 8176ea2..dce3ba6 100644 (file)
@@ -219,6 +219,25 @@ MediaTime MediaTime::operator-(const MediaTime& rhs) const
     return a;
 }
 
+MediaTime MediaTime::operator-() const
+{
+    if (isInvalid())
+        return invalidTime();
+
+    if (isIndefinite())
+        return indefiniteTime();
+
+    if (isPositiveInfinite())
+        return negativeInfiniteTime();
+
+    if (isNegativeInfinite())
+        return positiveInfiniteTime();
+
+    MediaTime negativeTime = *this;
+    negativeTime.m_timeValue = -negativeTime.m_timeValue;
+    return negativeTime;
+}
+
 MediaTime MediaTime::operator*(int32_t rhs) const
 {
     if (isInvalid())
@@ -283,6 +302,16 @@ bool MediaTime::operator<=(const MediaTime& rhs) const
     return compare(rhs) <= EqualTo;
 }
 
+bool MediaTime::operator!() const
+{
+    return compare(zeroTime()) == EqualTo;
+}
+
+MediaTime::operator bool() const
+{
+    return compare(zeroTime()) != EqualTo;
+}
+
 MediaTime::ComparisonFlags MediaTime::compare(const MediaTime& rhs) const
 {
     if ((isPositiveInfinite() && rhs.isPositiveInfinite())
index c4b18ce..d901c7e 100644 (file)
@@ -67,6 +67,7 @@ public:
     MediaTime& operator-=(const MediaTime& rhs) { return *this = *this - rhs; }
     MediaTime operator+(const MediaTime& rhs) const;
     MediaTime operator-(const MediaTime& rhs) const;
+    MediaTime operator-() const;
     MediaTime operator*(int32_t) const;
     bool operator<(const MediaTime& rhs) const;
     bool operator>(const MediaTime& rhs) const;
@@ -74,6 +75,8 @@ public:
     bool operator==(const MediaTime& rhs) const;
     bool operator>=(const MediaTime& rhs) const;
     bool operator<=(const MediaTime& rhs) const;
+    bool operator!() const;
+    explicit operator bool() const;
 
     typedef enum {
         LessThan = -1,
@@ -101,6 +104,12 @@ public:
 
     void dump(PrintStream& out) const;
 
+    // Make the following casts errors:
+    operator double() const = delete;
+    MediaTime(double) = delete;
+    operator int() const = delete;
+    MediaTime(int) = delete;
+
     friend WTF_EXPORT_PRIVATE MediaTime abs(const MediaTime& rhs);
 private:
     static const int32_t DefaultTimeScale = 10000000;
index dc2c6f2..8381a98 100644 (file)
@@ -1,3 +1,295 @@
+2014-06-06  Jer Noble  <jer.noble@apple.com>
+
+        Refactoring: make MediaTime the primary time type for audiovisual times.
+        https://bugs.webkit.org/show_bug.cgi?id=133579
+
+        Reviewed by Eric Carlson.
+
+        In order to limit the number of floating-point rounding errors for media systems which
+        can make use of rational time objects.
+
+        Add some convenience methods to convert between QTTime and MediaTime.
+        * platform/graphics/mac/MediaTimeQTKit.h: Added.
+        * platform/graphics/mac/MediaTimeQTKit.mm: Added.
+        (WebCore::toMediaTime):
+        (WebCore::toQTTime):
+
+        Rename MediaTimeMac -> MediaTimeAVFoundation:
+        * platform/graphics/avfoundation/MediaTimeAVFoundation.cpp: Renamed from Source/WebCore/platform/mac/MediaTimeMac.cpp.
+        (WebCore::toMediaTime):
+        (WebCore::toCMTime):
+        * platform/graphics/avfoundation/MediaTimeAVFoundation.h: Renamed from Source/WebCore/platform/mac/MediaTimeMac.h.
+
+        Use MediaTime instead of double:
+        * Modules/mediasource/MediaSource.cpp:
+        (WebCore::MediaSource::duration):
+        (WebCore::MediaSource::currentTime):
+        (WebCore::MediaSource::buffered):
+        (WebCore::MediaSource::setDuration):
+        (WebCore::MediaSource::activeRanges):
+        * Modules/mediasource/MediaSource.h:
+        * Modules/mediasource/SourceBuffer.cpp:
+        (WebCore::SourceBuffer::remove):
+        (WebCore::SourceBuffer::removeCodedFrames):
+        (WebCore::SourceBuffer::sourceBufferPrivateDidReceiveInitializationSegment):
+        (WebCore::SourceBuffer::sourceBufferPrivateDidReceiveSample):
+        (WebCore::SourceBuffer::hasCurrentTime):
+        (WebCore::SourceBuffer::hasFutureTime):
+        (WebCore::SourceBuffer::canPlayThrough):
+        * WebCore.xcodeproj/project.pbxproj:
+        * bindings/js/JSDataCueCustom.cpp:
+        (WebCore::JSDataCueConstructor::constructJSDataCue):
+        * html/HTMLMediaElement.cpp:
+        (WebCore::HTMLMediaElement::HTMLMediaElement):
+        (WebCore::HTMLMediaElement::parseAttribute):
+        * html/HTMLMediaElement.h:
+        (WebCore::ValueToString<MediaTime>::string):
+        * html/MediaFragmentURIParser.cpp:
+        (WebCore::MediaFragmentURIParser::MediaFragmentURIParser):
+        (WebCore::MediaFragmentURIParser::startTime):
+        (WebCore::MediaFragmentURIParser::endTime):
+        (WebCore::MediaFragmentURIParser::parseTimeFragment):
+        (WebCore::MediaFragmentURIParser::parseNPTFragment):
+        (WebCore::MediaFragmentURIParser::parseNPTTime):
+        (WebCore::MediaFragmentURIParser::invalidTimeValue): Deleted.
+        * html/MediaFragmentURIParser.h:
+        * html/TimeRanges.h:
+        (WebCore::TimeRanges::ranges):
+        * html/track/DataCue.cpp:
+        (WebCore::DataCue::DataCue):
+        * html/track/DataCue.h:
+        (WebCore::DataCue::create):
+        * html/track/InbandDataTextTrack.cpp:
+        (WebCore::InbandDataTextTrack::addDataCue):
+        (WebCore::InbandDataTextTrack::updateDataCue):
+        (WebCore::InbandDataTextTrack::removeDataCue):
+        * html/track/InbandDataTextTrack.h:
+        * html/track/InbandGenericTextTrack.cpp:
+        (WebCore::InbandGenericTextTrack::updateCueFromCueData):
+        (WebCore::InbandGenericTextTrack::addGenericCue):
+        (WebCore::InbandGenericTextTrack::removeGenericCue):
+        * html/track/InbandTextTrack.cpp:
+        (WebCore::InbandTextTrack::startTimeVariance):
+        * html/track/InbandTextTrack.h:
+        * html/track/InbandWebVTTTextTrack.cpp:
+        (WebCore::InbandWebVTTTextTrack::newCuesParsed):
+        * html/track/TextTrack.cpp:
+        (WebCore::TextTrack::addCue):
+        (WebCore::TextTrack::hasCue):
+        * html/track/TextTrack.h:
+        (WebCore::TextTrack::startTimeVariance):
+        * html/track/TextTrackCue.cpp:
+        (WebCore::TextTrackCue::create):
+        (WebCore::TextTrackCue::TextTrackCue):
+        (WebCore::TextTrackCue::setStartTime):
+        (WebCore::TextTrackCue::setEndTime):
+        (WebCore::TextTrackCue::hasEquivalentStartTime):
+        * html/track/TextTrackCue.h:
+        (WebCore::TextTrackCue::startTime):
+        (WebCore::TextTrackCue::endTime):
+        * html/track/TextTrackCueGeneric.cpp:
+        (WebCore::TextTrackCueGeneric::TextTrackCueGeneric):
+        * html/track/TextTrackCueGeneric.h:
+        * html/track/TextTrackCueList.cpp:
+        (WebCore::TextTrackCueList::add):
+        * html/track/VTTCue.cpp:
+        (WebCore::VTTCue::VTTCue):
+        (WebCore::VTTCue::markFutureAndPastNodes):
+        (WebCore::VTTCue::updateDisplayTree):
+        * html/track/VTTCue.h:
+        (WebCore::VTTCue::create):
+        * html/track/WebVTTParser.cpp:
+        (WebCore::WebVTTParser::WebVTTParser):
+        (WebCore::WebVTTParser::resetCueValues):
+        (WebCore::WebVTTParser::collectTimeStamp):
+        (WebCore::WebVTTTreeBuilder::constructTreeFromToken):
+        * html/track/WebVTTParser.h:
+        (WebCore::WebVTTCueData::startTime):
+        (WebCore::WebVTTCueData::setStartTime):
+        (WebCore::WebVTTCueData::endTime):
+        (WebCore::WebVTTCueData::setEndTime):
+        (WebCore::WebVTTCueData::WebVTTCueData): Deleted.
+        * platform/graphics/InbandTextTrackPrivateClient.h:
+        (WebCore::GenericCueData::startTime):
+        (WebCore::GenericCueData::setStartTime):
+        (WebCore::GenericCueData::endTime):
+        (WebCore::GenericCueData::setEndTime):
+        (WebCore::GenericCueData::GenericCueData):
+        * platform/graphics/MediaPlayer.cpp:
+        (WebCore::MediaPlayer::duration):
+        (WebCore::MediaPlayer::startTime):
+        (WebCore::MediaPlayer::initialTime):
+        (WebCore::MediaPlayer::currentTime):
+        (WebCore::MediaPlayer::seekWithTolerance):
+        (WebCore::MediaPlayer::seek):
+        (WebCore::MediaPlayer::maxTimeSeekable):
+        (WebCore::MediaPlayer::minTimeSeekable):
+        (WebCore::MediaPlayer::mediaTimeForTimeValue):
+        (WebCore::MediaPlayer::totalFrameDelay):
+        * platform/graphics/MediaPlayer.h:
+        * platform/graphics/MediaPlayerPrivate.h:
+        (WebCore::MediaPlayerPrivateInterface::durationMediaTime):
+        (WebCore::MediaPlayerPrivateInterface::currentMediaTime):
+        (WebCore::MediaPlayerPrivateInterface::seek):
+        (WebCore::MediaPlayerPrivateInterface::seekWithTolerance):
+        (WebCore::MediaPlayerPrivateInterface::startTime):
+        (WebCore::MediaPlayerPrivateInterface::initialTime):
+        (WebCore::MediaPlayerPrivateInterface::seekable):
+        (WebCore::MediaPlayerPrivateInterface::maxMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateInterface::minMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateInterface::mediaTimeForTimeValue):
+        (WebCore::MediaPlayerPrivateInterface::totalFrameDelay):
+        (WebCore::MediaPlayerPrivateInterface::startTimeDouble): Deleted.
+        (WebCore::MediaPlayerPrivateInterface::maxTimeSeekableDouble): Deleted.
+        (WebCore::MediaPlayerPrivateInterface::mediaTimeForTimeValueDouble): Deleted.
+        * platform/graphics/MediaSourcePrivateClient.h:
+        * platform/graphics/TrackPrivateBase.h:
+        (WebCore::TrackPrivateBase::startTimeVariance):
+        * platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.cpp:
+        (WebCore::InbandMetadataTextTrackPrivateAVF::InbandMetadataTextTrackPrivateAVF):
+        (WebCore::InbandMetadataTextTrackPrivateAVF::addDataCue):
+        (WebCore::InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes):
+        (WebCore::InbandMetadataTextTrackPrivateAVF::flushPartialCues):
+        * platform/graphics/avfoundation/InbandMetadataTextTrackPrivateAVF.h:
+        (WebCore::IncompleteMetaDataCue::IncompleteMetaDataCue):
+        (WebCore::IncompleteMetaDataCue::startTime):
+        * platform/graphics/avfoundation/InbandTextTrackPrivateAVF.cpp:
+        (WebCore::InbandTextTrackPrivateAVF::processCue):
+        (WebCore::InbandTextTrackPrivateAVF::resetCueValues):
+        * platform/graphics/avfoundation/InbandTextTrackPrivateAVF.h:
+        * platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.cpp:
+        (WebCore::MediaPlayerPrivateAVFoundation::MediaPlayerPrivateAVFoundation):
+        (WebCore::MediaPlayerPrivateAVFoundation::durationMediaTime):
+        (WebCore::MediaPlayerPrivateAVFoundation::seek):
+        (WebCore::MediaPlayerPrivateAVFoundation::seekWithTolerance):
+        (WebCore::MediaPlayerPrivateAVFoundation::maxMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundation::minMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundation::maxTimeLoaded):
+        (WebCore::MediaPlayerPrivateAVFoundation::didLoadingProgress):
+        (WebCore::MediaPlayerPrivateAVFoundation::updateStates):
+        (WebCore::MediaPlayerPrivateAVFoundation::loadedTimeRangesChanged):
+        (WebCore::MediaPlayerPrivateAVFoundation::seekableTimeRangesChanged):
+        (WebCore::MediaPlayerPrivateAVFoundation::timeChanged):
+        (WebCore::MediaPlayerPrivateAVFoundation::didEnd):
+        (WebCore::MediaPlayerPrivateAVFoundation::invalidateCachedDuration):
+        (WebCore::MediaPlayerPrivateAVFoundation::scheduleMainThreadNotification):
+        (WebCore::MediaPlayerPrivateAVFoundation::extraMemoryCost):
+        (WebCore::MediaPlayerPrivateAVFoundation::duration): Deleted.
+        (WebCore::MediaPlayerPrivateAVFoundation::maxTimeSeekableDouble): Deleted.
+        (WebCore::MediaPlayerPrivateAVFoundation::minTimeSeekable): Deleted.
+        * platform/graphics/avfoundation/MediaPlayerPrivateAVFoundation.h:
+        (WebCore::MediaPlayerPrivateAVFoundation::Notification::Notification):
+        (WebCore::MediaPlayerPrivateAVFoundation::Notification::time):
+        * platform/graphics/avfoundation/cf/MediaPlayerPrivateAVFoundationCF.cpp:
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformDuration):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::currentTime):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::seekToTime):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformDuration):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::currentTime):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::seekToTime):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue): Deleted.
+        (WebCore::AVFWrapper::seekToTime): Deleted.
+        (WebCore::LegibleOutputData::LegibleOutputData): Deleted.
+        (WebCore::AVFWrapper::createImageForTimeInRect): Deleted.
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded):
+        (WebCore::MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue): Deleted.
+        (WebCore::AVFWrapper::seekToTime): Deleted.
+        (WebCore::LegibleOutputData::LegibleOutputData): Deleted.
+        (WebCore::AVFWrapper::createImageForTimeInRect): Deleted.
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::cancelLoad):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::platformDuration):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::currentMediaTime):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::seekToTime):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::processCue):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::metadataDidArrive):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::durationDidChange):
+        (-[WebCoreAVFMovieObserver observeValueForKeyPath:ofObject:change:context:]):
+        (-[WebCoreAVFMovieObserver legibleOutput:didOutputAttributedStrings:nativeSampleBuffers:forItemTime:]):
+        (WebCore::MediaPlayerPrivateAVFoundationObjC::currentTime): Deleted.
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::startTime):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::initialTime):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekInternal):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::seekable):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay):
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::durationDouble): Deleted.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble): Deleted.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble): Deleted.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::maxTimeSeekableDouble): Deleted.
+        (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::minTimeSeekable): Deleted.
+        * platform/graphics/avfoundation/objc/OutOfBandTextTrackPrivateAVF.h:
+        * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+        * platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h:
+        (WebCore::InbandMetadataTextTrackPrivateGStreamer::addDataCue):
+        * platform/graphics/gstreamer/InbandMetadataTextTrackPrivateGStreamer.h:
+        (WebCore::InbandMetadataTextTrackPrivateGStreamer::addDataCue):
+        * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
+        (WebCore::MediaPlayerPrivateGStreamer::processMpegTsSection):
+        (WebCore::MediaPlayerPrivateGStreamer::processTableOfContentsEntry):
+        (WebCore::MediaPlayerPrivateGStreamer::processMpegTsSection):
+        (WebCore::MediaPlayerPrivateGStreamer::processTableOfContentsEntry):
+        * platform/graphics/mac/MediaPlayerPrivateQTKit.h:
+        * platform/graphics/mac/MediaPlayerPrivateQTKit.mm:
+        * platform/graphics/mac/MediaPlayerPrivateQTKit.mm:
+        (WebCore::maxValueForTimeRanges):
+        (WebCore::MediaPlayerPrivateQTKit::MediaPlayerPrivateQTKit):
+        (WebCore::MediaPlayerPrivateQTKit::durationMediaTime):
+        (WebCore::MediaPlayerPrivateQTKit::currentMediaTime):
+        (WebCore::MediaPlayerPrivateQTKit::seek):
+        (WebCore::MediaPlayerPrivateQTKit::doSeek):
+        (WebCore::MediaPlayerPrivateQTKit::cancelSeek):
+        (WebCore::MediaPlayerPrivateQTKit::seekTimerFired):
+        (WebCore::MediaPlayerPrivateQTKit::seeking):
+        (WebCore::MediaPlayerPrivateQTKit::setPreservesPitch):
+        (WebCore::MediaPlayerPrivateQTKit::buffered):
+        (WebCore::MediaPlayerPrivateQTKit::maxMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateQTKit::maxMediaTimeLoaded):
+        (WebCore::MediaPlayerPrivateQTKit::didLoadingProgress):
+        (WebCore::MediaPlayerPrivateQTKit::updateStates):
+        (WebCore::MediaPlayerPrivateQTKit::timeChanged):
+        (WebCore::MediaPlayerPrivateQTKit::didEnd):
+        (WebCore::MediaPlayerPrivateQTKit::maxMediaTimeSeekable):
+        (WebCore::MediaPlayerPrivateQTKit::maxMediaTimeLoaded):
+        (WebCore::MediaPlayerPrivateQTKit::createQTTime): Deleted.
+        (WebCore::MediaPlayerPrivateQTKit::duration): Deleted.
+        (WebCore::MediaPlayerPrivateQTKit::currentTime): Deleted.
+        (WebCore::MediaPlayerPrivateQTKit::maxTimeSeekable): Deleted.
+        (WebCore::MediaPlayerPrivateQTKit::maxTimeLoaded): Deleted.
+        (WebCore::MediaPlayerPrivateQTKit::mediaTimeForTimeValue): Deleted.
+        * platform/mac/PlatformClockCM.mm:
+        * platform/mock/mediasource/MockMediaPlayerMediaSource.cpp:
+        (WebCore::MockMediaPlayerMediaSource::maxMediaTimeSeekable):
+        (WebCore::MockMediaPlayerMediaSource::currentMediaTime):
+        (WebCore::MockMediaPlayerMediaSource::durationMediaTime):
+        (WebCore::MockMediaPlayerMediaSource::seekWithTolerance):
+        (WebCore::MockMediaPlayerMediaSource::totalFrameDelay):
+        (WebCore::MockMediaPlayerMediaSource::maxTimeSeekableDouble): Deleted.
+        (WebCore::MockMediaPlayerMediaSource::currentTimeDouble): Deleted.
+        (WebCore::MockMediaPlayerMediaSource::durationDouble): Deleted.
+        * platform/mock/mediasource/MockMediaPlayerMediaSource.h:
+        * platform/mock/mediasource/MockMediaSourcePrivate.cpp:
+        (WebCore::MockMediaSourcePrivate::MockMediaSourcePrivate):
+        * platform/mock/mediasource/MockMediaSourcePrivate.h:
+        * platform/mock/mediasource/MockSourceBufferPrivate.cpp:
+        (WebCore::MockSourceBufferPrivate::enqueueSample):
+
 2014-09-05  Antti Koivisto  <antti@apple.com>
 
         REGRESSION(r173272): Two blob tests failing on WK1
index adb12bb..f178393 100644 (file)
@@ -76,7 +76,7 @@ PassRefPtr<MediaSource> MediaSource::create(ScriptExecutionContext& context)
 MediaSource::MediaSource(ScriptExecutionContext& context)
     : ActiveDOMObject(&context)
     , m_mediaElement(0)
-    , m_duration(std::numeric_limits<double>::quiet_NaN())
+    , m_duration(MediaTime::invalidTime())
     , m_pendingSeekTime(MediaTime::invalidTime())
     , m_readyState(closedKeyword())
     , m_asyncEventQueue(*this)
@@ -128,58 +128,56 @@ void MediaSource::removedFromRegistry()
     unsetPendingActivity(this);
 }
 
-double MediaSource::duration() const
+MediaTime MediaSource::duration() const
 {
     return m_duration;
 }
 
-double MediaSource::currentTime() const
+MediaTime MediaSource::currentTime() const
 {
-    return m_mediaElement ? m_mediaElement->currentTime() : 0;
+    return m_mediaElement ? m_mediaElement->currentMediaTime() : MediaTime::zeroTime();
 }
 
 std::unique_ptr<PlatformTimeRanges> MediaSource::buffered() const
 {
     // Implements MediaSource algorithm for HTMLMediaElement.buffered.
     // https://dvcs.w3.org/hg/html-media/raw-file/default/media-source/media-source.html#htmlmediaelement-extensions
-    Vector<RefPtr<TimeRanges>> ranges = activeRanges();
+    Vector<PlatformTimeRanges> activeRanges = this->activeRanges();
 
     // 1. If activeSourceBuffers.length equals 0 then return an empty TimeRanges object and abort these steps.
-    if (ranges.isEmpty())
+    if (activeRanges.isEmpty())
         return PlatformTimeRanges::create();
 
     // 2. Let active ranges be the ranges returned by buffered for each SourceBuffer object in activeSourceBuffers.
     // 3. Let highest end time be the largest range end time in the active ranges.
-    double highestEndTime = -1;
-    for (size_t i = 0; i < ranges.size(); ++i) {
-        unsigned length = ranges[i]->length();
+    MediaTime highestEndTime = MediaTime::zeroTime();
+    for (auto& ranges : activeRanges) {
+        unsigned length = ranges.length();
         if (length)
-            highestEndTime = std::max(highestEndTime, ranges[i]->end(length - 1, ASSERT_NO_EXCEPTION));
+            highestEndTime = std::max(highestEndTime, ranges.end(length - 1));
     }
 
     // Return an empty range if all ranges are empty.
-    if (highestEndTime < 0)
+    if (!highestEndTime)
         return PlatformTimeRanges::create();
 
     // 4. Let intersection ranges equal a TimeRange object containing a single range from 0 to highest end time.
-    RefPtr<TimeRanges> intersectionRanges = TimeRanges::create(0, highestEndTime);
+    PlatformTimeRanges intersectionRanges(MediaTime::zeroTime(), highestEndTime);
 
     // 5. For each SourceBuffer object in activeSourceBuffers run the following steps:
     bool ended = readyState() == endedKeyword();
-    for (size_t i = 0; i < ranges.size(); ++i) {
+    for (auto& sourceRanges : activeRanges) {
         // 5.1 Let source ranges equal the ranges returned by the buffered attribute on the current SourceBuffer.
-        TimeRanges* sourceRanges = ranges[i].get();
-
         // 5.2 If readyState is "ended", then set the end time on the last range in source ranges to highest end time.
-        if (ended && sourceRanges->length())
-            sourceRanges->add(sourceRanges->start(sourceRanges->length() - 1, ASSERT_NO_EXCEPTION), highestEndTime);
+        if (ended && sourceRanges.length())
+            sourceRanges.add(sourceRanges.start(sourceRanges.length() - 1), highestEndTime);
 
         // 5.3 Let new intersection ranges equal the the intersection between the intersection ranges and the source ranges.
         // 5.4 Replace the ranges in intersection ranges with the new intersection ranges.
-        intersectionRanges->intersectWith(*sourceRanges);
+        intersectionRanges.intersectWith(sourceRanges);
     }
 
-    return PlatformTimeRanges::create(intersectionRanges->ranges());
+    return PlatformTimeRanges::create(intersectionRanges);
 }
 
 void MediaSource::seekToTime(const MediaTime& time)
@@ -343,10 +341,10 @@ void MediaSource::setDuration(double duration, ExceptionCode& ec)
     }
 
     // 4. Run the duration change algorithm with new duration set to the value being assigned to this attribute.
-    setDurationInternal(duration);
+    setDurationInternal(MediaTime::createWithDouble(duration));
 }
 
-void MediaSource::setDurationInternal(double duration)
+void MediaSource::setDurationInternal(const MediaTime& duration)
 {
     // Duration Change Algorithm
     // https://dvcs.w3.org/hg/html-media/raw-file/tip/media-source/media-source.html#duration-change-algorithm
@@ -356,14 +354,14 @@ void MediaSource::setDurationInternal(double duration)
         return;
 
     // 2. Set old duration to the current value of duration.
-    double oldDuration = m_duration;
+    MediaTime oldDuration = m_duration;
 
     // 3. Update duration to new duration.
     m_duration = duration;
 
     // 4. If the new duration is less than old duration, then call remove(new duration, old duration)
     // on all objects in sourceBuffers.
-    if (!isnan(oldDuration) && duration < oldDuration) {
+    if (oldDuration.isValid() && duration < oldDuration) {
         for (auto& sourceBuffer : *m_sourceBuffers)
             sourceBuffer->remove(duration, oldDuration, IGNORE_EXCEPTION);
     }
@@ -376,7 +374,7 @@ void MediaSource::setDurationInternal(double duration)
     // NOTE: Assume UA is able to partially render audio frames.
 
     // 6. Update the media controller duration to new duration and run the HTMLMediaElement duration change algorithm.
-    LOG(MediaSource, "MediaSource::setDurationInternal(%p) - duration(%g)", this, duration);
+    LOG(MediaSource, "MediaSource::setDurationInternal(%p) - duration(%g)", this, duration.toDouble());
     m_private->durationChanged();
 }
 
@@ -390,7 +388,7 @@ void MediaSource::setReadyState(const AtomicString& state)
     if (state == closedKeyword()) {
         m_private.clear();
         m_mediaElement = 0;
-        m_duration = std::numeric_limits<double>::quiet_NaN();
+        m_duration = MediaTime::invalidTime();
     }
 
     if (oldState == state)
@@ -446,10 +444,10 @@ void MediaSource::streamEndedWithError(const AtomicString& error, ExceptionCode&
         // ↳ If error is not set, is null, or is an empty string
         // 1. Run the duration change algorithm with new duration set to the highest end time reported by
         // the buffered attribute across all SourceBuffer objects in sourceBuffers.
-        double maxEndTime = 0;
+        MediaTime maxEndTime;
         for (auto& sourceBuffer : *m_sourceBuffers) {
             if (auto length = sourceBuffer->buffered()->length())
-                maxEndTime = std::max(sourceBuffer->buffered()->end(length - 1, IGNORE_EXCEPTION), maxEndTime);
+                maxEndTime = std::max(sourceBuffer->buffered()->ranges().end(length - 1), maxEndTime);
         }
         setDurationInternal(maxEndTime);
 
@@ -814,12 +812,11 @@ void MediaSource::onReadyStateChange(const AtomicString& oldState, const AtomicS
     scheduleEvent(eventNames().sourcecloseEvent);
 }
 
-Vector<RefPtr<TimeRanges>> MediaSource::activeRanges() const
+Vector<PlatformTimeRanges> MediaSource::activeRanges() const
 {
-    Vector<RefPtr<TimeRanges>> activeRanges(m_activeSourceBuffers->length());
-    for (size_t i = 0, length = m_activeSourceBuffers->length(); i < length; ++i)
-        activeRanges[i] = m_activeSourceBuffers->item(i)->buffered(ASSERT_NO_EXCEPTION);
-
+    Vector<PlatformTimeRanges> activeRanges;
+    for (auto& sourceBuffer : *m_activeSourceBuffers)
+        activeRanges.append(sourceBuffer->buffered()->ranges());
     return activeRanges;
 }
 
index bd4b240..fad4dc1 100644 (file)
@@ -72,7 +72,7 @@ public:
 
     // MediaSourcePrivateClient
     virtual void setPrivateAndOpen(PassRef<MediaSourcePrivate>) override;
-    virtual double duration() const override;
+    virtual MediaTime duration() const override;
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const override;
     virtual void seekToTime(const MediaTime&) override;
 
@@ -83,8 +83,8 @@ public:
     void completeSeek();
 
     void setDuration(double, ExceptionCode&);
-    void setDurationInternal(double);
-    double currentTime() const;
+    void setDurationInternal(const MediaTime&);
+    MediaTime currentTime() const;
     const AtomicString& readyState() const { return m_readyState; }
     void setReadyState(const AtomicString&);
     void endOfStream(ExceptionCode&);
@@ -119,7 +119,7 @@ protected:
     explicit MediaSource(ScriptExecutionContext&);
 
     void onReadyStateChange(const AtomicString& oldState, const AtomicString& newState);
-    Vector<RefPtr<TimeRanges>> activeRanges() const;
+    Vector<PlatformTimeRanges> activeRanges() const;
 
     RefPtr<SourceBufferPrivate> createSourceBufferPrivate(const ContentType&, ExceptionCode&);
     void scheduleEvent(const AtomicString& eventName);
@@ -133,7 +133,7 @@ protected:
     RefPtr<SourceBufferList> m_sourceBuffers;
     RefPtr<SourceBufferList> m_activeSourceBuffers;
     HTMLMediaElement* m_mediaElement;
-    double m_duration;
+    MediaTime m_duration;
     MediaTime m_pendingSeekTime;
     AtomicString m_readyState;
     GenericEventQueue m_asyncEventQueue;
index 764f27e..97327aa 100644 (file)
@@ -241,12 +241,17 @@ void SourceBuffer::abort(ExceptionCode& ec)
 
 void SourceBuffer::remove(double start, double end, ExceptionCode& ec)
 {
-    LOG(MediaSource, "SourceBuffer::remove(%p) - start(%lf), end(%lf)", this, start, end);
+    remove(MediaTime::createWithDouble(start), MediaTime::createWithDouble(end), ec);
+}
+
+void SourceBuffer::remove(const MediaTime& start, const MediaTime& end, ExceptionCode& ec)
+{
+    LOG(MediaSource, "SourceBuffer::remove(%p) - start(%lf), end(%lf)", this, start.toDouble(), end.toDouble());
 
     // Section 3.2 remove() method steps.
     // 1. If start is negative or greater than duration, then throw an InvalidAccessError exception and abort these steps.
     // 2. If end is less than or equal to start, then throw an InvalidAccessError exception and abort these steps.
-    if (start < 0 || (m_source && (std::isnan(m_source->duration()) || start > m_source->duration())) || end <= start) {
+    if (start < MediaTime::zeroTime() || (m_source && (!m_source->duration().isValid() || start > m_source->duration())) || end <= start) {
         ec = INVALID_ACCESS_ERR;
         return;
     }
@@ -271,8 +276,8 @@ void SourceBuffer::remove(double start, double end, ExceptionCode& ec)
     scheduleEvent(eventNames().updatestartEvent);
 
     // 8. Return control to the caller and run the rest of the steps asynchronously.
-    m_pendingRemoveStart = MediaTime::createWithDouble(start);
-    m_pendingRemoveEnd = MediaTime::createWithDouble(end);
+    m_pendingRemoveStart = start;
+    m_pendingRemoveEnd = end;
     m_removeTimer.startOneShot(0);
 }
 
@@ -513,7 +518,7 @@ void SourceBuffer::sourceBufferPrivateAppendComplete(SourceBufferPrivate*, Appen
     if (m_source)
         m_source->monitorSourceBuffers();
 
-    MediaTime currentMediaTime = MediaTime::createWithDouble(m_source->currentTime());
+    MediaTime currentMediaTime = m_source->currentTime();
     for (auto& trackBufferPair : m_trackBufferMap) {
         TrackBuffer& trackBuffer = trackBufferPair.value;
         const AtomicString& trackID = trackBufferPair.key;
@@ -606,8 +611,8 @@ void SourceBuffer::removeCodedFrames(const MediaTime& start, const MediaTime& en
     // https://dvcs.w3.org/hg/html-media/raw-file/tip/media-source/media-source.html#sourcebuffer-coded-frame-removal
 
     // 1. Let start be the starting presentation timestamp for the removal range.
-    MediaTime durationMediaTime = MediaTime::createWithDouble(m_source->duration());
-    MediaTime currentMediaTime = MediaTime::createWithDouble(m_source->currentTime());
+    MediaTime durationMediaTime = m_source->duration();
+    MediaTime currentMediaTime = m_source->currentTime();
 
     // 2. Let end be the end presentation timestamp for the removal range.
     // 3. For each track buffer in this source buffer, run the following steps:
@@ -713,11 +718,11 @@ void SourceBuffer::evictCodedFrames(size_t newDataSize)
     // NOTE: begin by removing data from the beginning of the buffered ranges, 30 seconds at
     // a time, up to 30 seconds before currentTime.
     MediaTime thirtySeconds = MediaTime(30, 1);
-    MediaTime currentTime = MediaTime::createWithDouble(m_source->currentTime());
+    MediaTime currentTime = m_source->currentTime();
     MediaTime maximumRangeEnd = currentTime - thirtySeconds;
 
 #if !LOG_DISABLED
-    LOG(MediaSource, "SourceBuffer::evictCodedFrames(%p) - currentTime = %lf, require %zu bytes, maximum buffer size is %zu", this, m_source->currentTime(), extraMemoryCost() + newDataSize, maximumBufferSize);
+    LOG(MediaSource, "SourceBuffer::evictCodedFrames(%p) - currentTime = %lf, require %zu bytes, maximum buffer size is %zu", this, m_source->currentTime().toDouble(), extraMemoryCost() + newDataSize, maximumBufferSize);
     size_t initialBufferedSize = extraMemoryCost();
 #endif
 
@@ -753,7 +758,7 @@ void SourceBuffer::evictCodedFrames(size_t newDataSize)
 
     MediaTime minimumRangeStart = currentTime + thirtySeconds;
 
-    rangeEnd = MediaTime::createWithDouble(m_source->duration());
+    rangeEnd = m_source->duration();
     rangeStart = rangeEnd - thirtySeconds;
     while (rangeStart > minimumRangeStart) {
 
@@ -868,13 +873,13 @@ void SourceBuffer::sourceBufferPrivateDidReceiveInitializationSegment(SourceBuff
     // 3.5.7 Initialization Segment Received
     // https://dvcs.w3.org/hg/html-media/raw-file/default/media-source/media-source.html#sourcebuffer-init-segment-received
     // 1. Update the duration attribute if it currently equals NaN:
-    if (std::isnan(m_source->duration())) {
+    if (m_source->duration().isInvalid()) {
         // ↳ If the initialization segment contains a duration:
         //   Run the duration change algorithm with new duration set to the duration in the initialization segment.
         // ↳ Otherwise:
         //   Run the duration change algorithm with new duration set to positive Infinity.
         MediaTime newDuration = segment.duration.isValid() ? segment.duration : MediaTime::positiveInfiniteTime();
-        m_source->setDurationInternal(newDuration.toDouble());
+        m_source->setDurationInternal(newDuration);
     }
 
     // 2. If the initialization segment has no audio, video, or text tracks, then run the end of stream
@@ -1186,7 +1191,7 @@ void SourceBuffer::sourceBufferPrivateDidReceiveSample(SourceBufferPrivate*, Pas
         // FIXME: add support for "sequence" mode
 
         // 1.5 If timestampOffset is not 0, then run the following steps:
-        if (m_timestampOffset != MediaTime::zeroTime()) {
+        if (m_timestampOffset) {
             // 1.5.1 Add timestampOffset to the presentation timestamp.
             presentationTimestamp += m_timestampOffset;
 
@@ -1366,7 +1371,7 @@ void SourceBuffer::sourceBufferPrivateDidReceiveSample(SourceBufferPrivate*, Pas
 
             // Only force the TrackBuffer to re-enqueue if the removed ranges overlap with enqueued and possibly
             // not yet displayed samples.
-            MediaTime currentMediaTime = MediaTime::createWithDouble(m_source->currentTime());
+            MediaTime currentMediaTime = m_source->currentTime();
             if (currentMediaTime < trackBuffer.lastEnqueuedPresentationTime) {
                 PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.lastEnqueuedPresentationTime);
                 possiblyEnqueuedRanges.intersectWith(erasedRanges->ranges());
@@ -1375,7 +1380,7 @@ void SourceBuffer::sourceBufferPrivateDidReceiveSample(SourceBufferPrivate*, Pas
             }
 
             erasedRanges->invert();
-            m_buffered->intersectWith(*erasedRanges.get());
+            m_buffered->intersectWith(*erasedRanges);
         }
 
         // 1.17 If spliced audio frame is set:
@@ -1420,8 +1425,8 @@ void SourceBuffer::sourceBufferPrivateDidReceiveSample(SourceBufferPrivate*, Pas
 
     // 5. If the media segment contains data beyond the current duration, then run the duration change algorithm with new
     // duration set to the maximum of the current duration and the highest end timestamp reported by HTMLMediaElement.buffered.
-    if (highestPresentationEndTimestamp().toDouble() > m_source->duration())
-        m_source->setDurationInternal(highestPresentationEndTimestamp().toDouble());
+    if (highestPresentationEndTimestamp() > m_source->duration())
+        m_source->setDurationInternal(highestPresentationEndTimestamp());
 }
 
 bool SourceBuffer::hasAudio() const
@@ -1674,7 +1679,7 @@ std::unique_ptr<PlatformTimeRanges> SourceBuffer::bufferedAccountingForEndOfStre
     std::unique_ptr<PlatformTimeRanges> virtualRanges = PlatformTimeRanges::create(m_buffered->ranges());
     if (m_source->isEnded()) {
         MediaTime start = virtualRanges->maximumBufferedTime();
-        MediaTime end = MediaTime::createWithDouble(m_source->duration());
+        MediaTime end = m_source->duration();
         if (start <= end)
             virtualRanges->add(start, end);
     }
@@ -1686,8 +1691,8 @@ bool SourceBuffer::hasCurrentTime() const
     if (isRemoved() || !m_buffered->length())
         return false;
 
-    MediaTime currentTime = MediaTime::createWithDouble(m_source->currentTime());
-    MediaTime duration = MediaTime::createWithDouble(m_source->duration());
+    MediaTime currentTime = m_source->currentTime();
+    MediaTime duration = m_source->duration();
     if (currentTime >= duration)
         return true;
 
@@ -1704,8 +1709,8 @@ bool SourceBuffer::hasFutureTime() const
     if (!ranges->length())
         return false;
 
-    MediaTime currentTime = MediaTime::createWithDouble(m_source->currentTime());
-    MediaTime duration = MediaTime::createWithDouble(m_source->duration());
+    MediaTime currentTime = m_source->currentTime();
+    MediaTime duration = m_source->duration();
     if (currentTime >= duration)
         return true;
 
@@ -1737,8 +1742,8 @@ bool SourceBuffer::canPlayThrough()
         return true;
 
     // Add up all the time yet to be buffered.
-    MediaTime currentTime = MediaTime::createWithDouble(m_source->currentTime());
-    MediaTime duration = MediaTime::createWithDouble(m_source->duration());
+    MediaTime currentTime = m_source->currentTime();
+    MediaTime duration = m_source->duration();
 
     std::unique_ptr<PlatformTimeRanges> unbufferedRanges = bufferedAccountingForEndOfStream();
     unbufferedRanges->invert();
index 25c748f..ab48f93 100644 (file)
@@ -75,6 +75,7 @@ public:
     void appendBuffer(PassRefPtr<ArrayBufferView> data, ExceptionCode&);
     void abort(ExceptionCode&);
     void remove(double start, double end, ExceptionCode&);
+    void remove(const MediaTime&, const MediaTime&, ExceptionCode&);
 
     void abortIfUpdating();
     void removedFromMediaSource();
index 467eec5..03b07c7 100644 (file)
       <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release_WinCairo|x64'">true</ExcludedFromBuild>
     </ClCompile>
     <ClCompile Include="..\platform\graphics\avfoundation\MediaPlayerPrivateAVFoundation.cpp" />
+    <ClCompile Include="..\platform\graphics\avfoundation\MediaTimeAVFoundation.cpp" />
     <ClCompile Include="..\platform\graphics\avfoundation\cf\MediaPlayerPrivateAVFoundationCF.cpp" />
     <ClCompile Include="..\platform\network\AuthenticationChallengeBase.cpp" />
     <ClCompile Include="..\platform\network\BlobData.cpp" />
     <ClInclude Include="..\platform\graphics\ca\win\PlatformCALayerWinInternal.h" />
     <ClInclude Include="..\platform\graphics\ca\win\WKCACFViewLayerTreeHost.h" />
     <ClInclude Include="..\platform\graphics\avfoundation\MediaPlayerPrivateAVFoundation.h" />
+    <ClInclude Include="..\platform\graphics\avfoundation\MediaTimeAVFoundation.h" />
     <ClInclude Include="..\platform\graphics\avfoundation\cf\AVFoundationCFSoftLinking.h" />
     <ClInclude Include="..\platform\graphics\avfoundation\cf\CoreMediaSoftLinking.h" />
     <ClInclude Include="..\platform\graphics\avfoundation\cf\MediaPlayerPrivateAVFoundationCF.h" />
index 8841b79..3f96023 100644 (file)
                CD54DE4B17469C6D005E5B36 /* AudioSessionMac.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD54DE4917469C6D005E5B36 /* AudioSessionMac.cpp */; };
                CD5596911475B678001D0BD0 /* AudioFileReaderIOS.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD55968F1475B678001D0BD0 /* AudioFileReaderIOS.cpp */; };
                CD5596921475B678001D0BD0 /* AudioFileReaderIOS.h in Headers */ = {isa = PBXBuildFile; fileRef = CD5596901475B678001D0BD0 /* AudioFileReaderIOS.h */; };
+               CD60C0C6193E87C7003C656B /* MediaTimeQTKit.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD60C0C4193E87C7003C656B /* MediaTimeQTKit.mm */; };
+               CD60C0C7193E87C7003C656B /* MediaTimeQTKit.h in Headers */ = {isa = PBXBuildFile; fileRef = CD60C0C5193E87C7003C656B /* MediaTimeQTKit.h */; };
                CD61FE671794AADB004101EB /* MediaSourceRegistry.cpp in Sources */ = {isa = PBXBuildFile; fileRef = B1A942E115B5CE2200D525D1 /* MediaSourceRegistry.cpp */; };
                CD61FE681794AADB004101EB /* MediaSourceRegistry.h in Headers */ = {isa = PBXBuildFile; fileRef = B1A942E215B5CE2200D525D1 /* MediaSourceRegistry.h */; };
                CD641EB31818F5ED00EE4C41 /* MediaSourcePrivate.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EB11818F5ED00EE4C41 /* MediaSourcePrivate.h */; };
                CD641EB41818F5ED00EE4C41 /* SourceBufferPrivate.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EB21818F5ED00EE4C41 /* SourceBufferPrivate.h */; };
-               CD641EBF1819B36000EE4C41 /* MediaTimeMac.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD641EBD1819B35900EE4C41 /* MediaTimeMac.cpp */; };
-               CD641EC01819B36000EE4C41 /* MediaTimeMac.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EBE1819B35900EE4C41 /* MediaTimeMac.h */; };
+               CD641EBF1819B36000EE4C41 /* MediaTimeAVFoundation.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD641EBD1819B35900EE4C41 /* MediaTimeAVFoundation.cpp */; };
+               CD641EC01819B36000EE4C41 /* MediaTimeAVFoundation.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EBE1819B35900EE4C41 /* MediaTimeAVFoundation.h */; };
                CD7DBB2818CA19A400C11066 /* CSSGridLineNamesValue.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD7DBB2618CA11FF00C11066 /* CSSGridLineNamesValue.cpp */; };
                CD7DBB2918CA19C600C11066 /* CSSGridLineNamesValue.h in Headers */ = {isa = PBXBuildFile; fileRef = CD7DBB2718CA11FF00C11066 /* CSSGridLineNamesValue.h */; };
                CD7E05221651C28200C1201F /* WebCoreAVFResourceLoader.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD7E05211651A84100C1201F /* WebCoreAVFResourceLoader.mm */; };
                CD54DE4917469C6D005E5B36 /* AudioSessionMac.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AudioSessionMac.cpp; sourceTree = "<group>"; };
                CD55968F1475B678001D0BD0 /* AudioFileReaderIOS.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = AudioFileReaderIOS.cpp; path = ios/AudioFileReaderIOS.cpp; sourceTree = "<group>"; };
                CD5596901475B678001D0BD0 /* AudioFileReaderIOS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AudioFileReaderIOS.h; path = ios/AudioFileReaderIOS.h; sourceTree = "<group>"; };
+               CD60C0C4193E87C7003C656B /* MediaTimeQTKit.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = MediaTimeQTKit.mm; sourceTree = "<group>"; };
+               CD60C0C5193E87C7003C656B /* MediaTimeQTKit.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaTimeQTKit.h; sourceTree = "<group>"; };
                CD641EB11818F5ED00EE4C41 /* MediaSourcePrivate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaSourcePrivate.h; sourceTree = "<group>"; };
                CD641EB21818F5ED00EE4C41 /* SourceBufferPrivate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SourceBufferPrivate.h; sourceTree = "<group>"; };
-               CD641EBD1819B35900EE4C41 /* MediaTimeMac.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = MediaTimeMac.cpp; sourceTree = "<group>"; };
-               CD641EBE1819B35900EE4C41 /* MediaTimeMac.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MediaTimeMac.h; sourceTree = "<group>"; };
+               CD641EBD1819B35900EE4C41 /* MediaTimeAVFoundation.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = MediaTimeAVFoundation.cpp; sourceTree = "<group>"; };
+               CD641EBE1819B35900EE4C41 /* MediaTimeAVFoundation.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MediaTimeAVFoundation.h; sourceTree = "<group>"; };
                CD641EC7181ED60100EE4C41 /* MediaSample.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MediaSample.h; sourceTree = "<group>"; };
                CD7DBB2618CA11FF00C11066 /* CSSGridLineNamesValue.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CSSGridLineNamesValue.cpp; sourceTree = "<group>"; };
                CD7DBB2718CA11FF00C11066 /* CSSGridLineNamesValue.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CSSGridLineNamesValue.h; sourceTree = "<group>"; };
                                07B442D5166C70B000556CAD /* InbandTextTrackPrivateAVF.h */,
                                076F0D0912B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.cpp */,
                                076F0D0A12B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.h */,
+                               CD641EBD1819B35900EE4C41 /* MediaTimeAVFoundation.cpp */,
+                               CD641EBE1819B35900EE4C41 /* MediaTimeAVFoundation.h */,
                                CD336F6317FA0A4D00DDDCD0 /* VideoTrackPrivateAVF.h */,
                        );
                        path = avfoundation;
                                06E81EEB0AB5DA9700C87837 /* LocalCurrentGraphicsContext.mm */,
                                BC25B528131C6D3900180E10 /* LocalizedStringsMac.cpp */,
                                1402645D0AFDC19B005919E2 /* LoggingMac.mm */,
-                               CD641EBD1819B35900EE4C41 /* MediaTimeMac.cpp */,
-                               CD641EBE1819B35900EE4C41 /* MediaTimeMac.h */,
                                BC772C5D0C4EB3440083285F /* MIMETypeRegistryMac.mm */,
                                93500F3113FDE3BE0099EC24 /* NSScrollerImpDetails.h */,
                                52F52E1014A0134F00ACC397 /* NSScrollerImpDetails.mm */,
                                49FFBF3E11C93EE3006A7118 /* WebGLLayer.mm */,
                                0FCF332B0F2B9A25004B6795 /* WebLayer.h */,
                                0FCF332A0F2B9A25004B6795 /* WebLayer.mm */,
+                               CD60C0C4193E87C7003C656B /* MediaTimeQTKit.mm */,
+                               CD60C0C5193E87C7003C656B /* MediaTimeQTKit.h */,
                        );
                        path = mac;
                        sourceTree = "<group>";
                                076306D017E1478D005A7C4E /* MediaStreamTrackSourcesCallback.h in Headers */,
                                076306D317E1478D005A7C4E /* MediaStreamTrackSourcesRequest.h in Headers */,
                                076306E317E22A43005A7C4E /* MediaStreamTrackSourcesRequestClient.h in Headers */,
-                               CD641EC01819B36000EE4C41 /* MediaTimeMac.h in Headers */,
+                               CD641EC01819B36000EE4C41 /* MediaTimeAVFoundation.h in Headers */,
                                0705852117FDC140005F2BCB /* MediaTrackConstraint.h in Headers */,
                                0705852417FDC140005F2BCB /* MediaTrackConstraints.h in Headers */,
                                0705852917FDE02C005F2BCB /* MediaTrackConstraintSet.h in Headers */,
                                436708E912D9CA4B00044234 /* RenderSVGTransformableContainer.h in Headers */,
                                0854B01D1255E4E600B9CDD0 /* RenderSVGTSpan.h in Headers */,
                                436708EB12D9CA4B00044234 /* RenderSVGViewportContainer.h in Headers */,
+                               CD60C0C7193E87C7003C656B /* MediaTimeQTKit.h in Headers */,
                                A8DF4AEB0980C42C0052981B /* RenderTable.h in Headers */,
                                6ED878C5147493F4004C3597 /* RenderTableCaption.h in Headers */,
                                A8DF4AE90980C42C0052981B /* RenderTableCell.h in Headers */,
                                078E090317D14CEE00420AA1 /* MediaStreamTrackEvent.cpp in Sources */,
                                07FFDE68181AED420072D409 /* MediaStreamTrackPrivate.cpp in Sources */,
                                076306D217E1478D005A7C4E /* MediaStreamTrackSourcesRequest.cpp in Sources */,
-                               CD641EBF1819B36000EE4C41 /* MediaTimeMac.cpp in Sources */,
+                               CD641EBF1819B36000EE4C41 /* MediaTimeAVFoundation.cpp in Sources */,
                                0705853A17FE0770005F2BCB /* MediaTrackConstraint.cpp in Sources */,
                                0705852317FDC140005F2BCB /* MediaTrackConstraints.cpp in Sources */,
                                0705853817FE044F005F2BCB /* MediaTrackConstraintSet.cpp in Sources */,
                                51E0BABC0DA5547100A9E417 /* StorageEvent.cpp in Sources */,
                                C5E9B67710697E1300C7BB1A /* StorageEventDispatcher.cpp in Sources */,
                                51E0BB390DA5ACB600A9E417 /* StorageMap.cpp in Sources */,
+                               CD60C0C6193E87C7003C656B /* MediaTimeQTKit.mm in Sources */,
                                C50D0E820FF4272900AC2644 /* StorageNamespace.cpp in Sources */,
                                C55E38C010040D5D00A56BDB /* StorageNamespaceImpl.cpp in Sources */,
                                5D5975B419635F1100D00878 /* SystemVersion.mm in Sources */,
index 8757b8c..2a3c9a8 100644 (file)
@@ -88,7 +88,7 @@ EncodedJSValue JSC_HOST_CALL constructJSDataCue(ExecState* exec)
         if (UNLIKELY(exec->hadException()))
             return JSValue::encode(jsUndefined());
 
-        object = DataCue::create(*context, startTime, endTime, data, type, ec);
+        object = DataCue::create(*context, MediaTime::createWithDouble(startTime), MediaTime::createWithDouble(endTime), data, type, ec);
         if (ec) {
             setDOMException(exec, ec);
             return JSValue::encode(JSValue());
@@ -100,7 +100,7 @@ EncodedJSValue JSC_HOST_CALL constructJSDataCue(ExecState* exec)
 #if !ENABLE(DATACUE_VALUE)
     return JSValue::encode(jsUndefined());
 #else
-    object = DataCue::create(*context, startTime, endTime, valueArgument, type);
+    object = DataCue::create(*context, MediaTime::createWithDouble(startTime), MediaTime::createWithDouble(endTime), valueArgument, type);
     return JSValue::encode(asObject(toJS(exec, castedThis->globalObject(), object.get())));
 #endif
 }
index d33faed..1a44348 100644 (file)
@@ -268,10 +268,9 @@ HTMLMediaElement::HTMLMediaElement(const QualifiedName& tagName, Document& docum
     , m_readyStateMaximum(HAVE_NOTHING)
     , m_volume(1.0f)
     , m_volumeInitialized(false)
-    , m_lastSeekTime(0)
     , m_previousProgressTime(std::numeric_limits<double>::max())
     , m_clockTimeAtLastUpdateEvent(0)
-    , m_lastTimeUpdateEventMovieTime(std::numeric_limits<double>::max())
+    , m_lastTimeUpdateEventMovieTime(MediaTime::positiveInfiniteTime())
     , m_loadState(WaitingForSource)
 #if PLATFORM(IOS)
     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
@@ -282,11 +281,8 @@ HTMLMediaElement::HTMLMediaElement(const QualifiedName& tagName, Document& docum
 #if ENABLE(MEDIA_SOURCE)
     , m_droppedVideoFrames(0)
 #endif
-    , m_cachedTime(MediaPlayer::invalidTime())
     , m_clockTimeAtLastCachedTimeUpdate(0)
     , m_minimumClockTimeToUpdateCachedTime(0)
-    , m_fragmentStartTime(MediaPlayer::invalidTime())
-    , m_fragmentEndTime(MediaPlayer::invalidTime())
     , m_pendingActionFlags(0)
     , m_actionAfterScan(Nothing)
     , m_scanType(Scan)
@@ -319,7 +315,7 @@ HTMLMediaElement::HTMLMediaElement(const QualifiedName& tagName, Document& docum
     , m_tracksAreReady(true)
     , m_haveVisibleTextTrack(false)
     , m_processingPreferenceChange(false)
-    , m_lastTextTrackUpdateTime(-1)
+    , m_lastTextTrackUpdateTime(MediaTime(-1, 1))
     , m_captionDisplayMode(CaptionUserPreferences::Automatic)
     , m_audioTracks(0)
     , m_textTracks(0)
@@ -938,7 +934,7 @@ void HTMLMediaElement::prepareForLoad()
         updateMediaController();
 #if ENABLE(VIDEO_TRACK)
         if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled())
-            updateActiveTextTrackCues(0);
+            updateActiveTextTrackCues(MediaTime::zeroTime());
 #endif
     }
 
@@ -963,7 +959,7 @@ void HTMLMediaElement::prepareForLoad()
 
     // FIXME: Investigate whether these can be moved into m_networkState != NETWORK_EMPTY block above
     // so they are closer to the relevant spec steps.
-    m_lastSeekTime = 0;
+    m_lastSeekTime = MediaTime::zeroTime();
 
     // The spec doesn't say to block the load event until we actually run the asynchronous section
     // algorithm, but do it now because we won't start that until after the timer fires and the 
@@ -1236,13 +1232,12 @@ static bool trackIndexCompare(TextTrack* a,
     return a->trackIndex() - b->trackIndex() < 0;
 }
 
-static bool eventTimeCueCompare(const std::pair<double, TextTrackCue*>& a,
-                                const std::pair<double, TextTrackCue*>& b)
+static bool eventTimeCueCompare(const std::pair<MediaTime, TextTrackCue*>& a, const std::pair<MediaTime, TextTrackCue*>& b)
 {
     // 12 - Sort the tasks in events in ascending time order (tasks with earlier
     // times first).
     if (a.first != b.first)
-        return a.first - b.first < 0;
+        return a.first - b.first < MediaTime::zeroTime();
 
     // If the cues belong to different text tracks, it doesn't make sense to
     // compare the two tracks by the relative cue order, so return the relative
@@ -1262,7 +1257,7 @@ static bool compareCueInterval(const CueInterval& one, const CueInterval& two)
 };
 
 
-void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
+void HTMLMediaElement::updateActiveTextTrackCues(const MediaTime& movieTime)
 {
     // 4.8.10.8 Playing the media resource
 
@@ -1300,7 +1295,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
     // 3 - Let last time be the current playback position at the time this
     // algorithm was last run for this media element, if this is not the first
     // time it has run.
-    double lastTime = m_lastTextTrackUpdateTime;
+    MediaTime lastTime = m_lastTextTrackUpdateTime;
 
     // 4 - If the current playback position has, since the last time this
     // algorithm was run, only changed through its usual monotonic increase
@@ -1308,13 +1303,13 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
     // cues whose start times are greater than or equal to last time and whose
     // end times are less than or equal to the current playback position.
     // Otherwise, let missed cues be an empty list.
-    if (lastTime >= 0 && m_lastSeekTime < movieTime) {
+    if (lastTime >= MediaTime::zeroTime() && m_lastSeekTime < movieTime) {
         CueList potentiallySkippedCues =
             m_cueTree.allOverlaps(m_cueTree.createInterval(lastTime, movieTime));
 
         for (size_t i = 0; i < potentiallySkippedCues.size(); ++i) {
-            double cueStartTime = potentiallySkippedCues[i].low();
-            double cueEndTime = potentiallySkippedCues[i].high();
+            MediaTime cueStartTime = potentiallySkippedCues[i].low();
+            MediaTime cueEndTime = potentiallySkippedCues[i].high();
 
             // Consider cues that may have been missed since the last seek time.
             if (cueStartTime > std::max(m_lastSeekTime, lastTime) && cueEndTime < movieTime)
@@ -1382,7 +1377,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
     // 8 - Let events be a list of tasks, initially empty. Each task in this
     // list will be associated with a text track, a text track cue, and a time,
     // which are used to sort the list before the tasks are queued.
-    Vector<std::pair<double, TextTrackCue*>> eventTasks;
+    Vector<std::pair<MediaTime, TextTrackCue*>> eventTasks;
 
     // 8 - Let affected tracks be a list of text tracks, initially empty.
     Vector<TextTrack*> affectedTracks;
@@ -1390,7 +1385,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
     for (size_t i = 0; i < missedCuesSize; ++i) {
         // 9 - For each text track cue in missed cues, prepare an event named enter
         // for the TextTrackCue object with the text track cue start time.
-        eventTasks.append(std::make_pair(missedCues[i].data()->startTime(),
+        eventTasks.append(std::make_pair(missedCues[i].data()->startMediaTime(),
                                          missedCues[i].data()));
 
         // 10 - For each text track [...] in missed cues, prepare an event
@@ -1402,9 +1397,8 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
         // checked when these tasks are actually queued below. This doesn't
         // affect sorting events before dispatch either, because the exit
         // event has the same time as the enter event.
-        if (missedCues[i].data()->startTime() < missedCues[i].data()->endTime())
-            eventTasks.append(std::make_pair(missedCues[i].data()->endTime(),
-                                             missedCues[i].data()));
+        if (missedCues[i].data()->startMediaTime() < missedCues[i].data()->endMediaTime())
+            eventTasks.append(std::make_pair(missedCues[i].data()->endMediaTime(), missedCues[i].data()));
     }
 
     for (size_t i = 0; i < previousCuesSize; ++i) {
@@ -1412,7 +1406,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
         // track cue active flag set prepare an event named exit for the
         // TextTrackCue object with the text track cue end time.
         if (!currentCues.contains(previousCues[i]))
-            eventTasks.append(std::make_pair(previousCues[i].data()->endTime(),
+            eventTasks.append(std::make_pair(previousCues[i].data()->endMediaTime(),
                                              previousCues[i].data()));
     }
 
@@ -1421,7 +1415,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
         // text track cue active flag set, prepare an event named enter for the
         // TextTrackCue object with the text track cue start time.
         if (!previousCues.contains(currentCues[i]))
-            eventTasks.append(std::make_pair(currentCues[i].data()->startTime(),
+            eventTasks.append(std::make_pair(currentCues[i].data()->startMediaTime(),
                                              currentCues[i].data()));
     }
 
@@ -1449,7 +1443,7 @@ void HTMLMediaElement::updateActiveTextTrackCues(double movieTime)
             event->setTarget(eventTasks[i].second);
             m_asyncEventQueue.enqueueEvent(event.release());
         } else {
-            if (eventTasks[i].first == eventTasks[i].second->startTime())
+            if (eventTasks[i].first == eventTasks[i].second->startMediaTime())
                 event = Event::create(eventNames().enterEvent, false, false);
             else
                 event = Event::create(eventNames().exitEvent, false, false);
@@ -1602,7 +1596,7 @@ void HTMLMediaElement::endIgnoringTrackDisplayUpdateRequests()
     ASSERT(m_ignoreTrackDisplayUpdate);
     --m_ignoreTrackDisplayUpdate;
     if (!m_ignoreTrackDisplayUpdate && m_inActiveDocument)
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
 }
 
 void HTMLMediaElement::textTrackAddCues(TextTrack* track, const TextTrackCueList* cues) 
@@ -1622,28 +1616,31 @@ void HTMLMediaElement::textTrackRemoveCues(TextTrack*, const TextTrackCueList* c
         textTrackRemoveCue(cues->item(i)->track(), cues->item(i));
 }
 
-void HTMLMediaElement::textTrackAddCue(TextTrack* track, PassRefPtr<TextTrackCue> cue)
+void HTMLMediaElement::textTrackAddCue(TextTrack* track, PassRefPtr<TextTrackCue> prpCue)
 {
     if (track->mode() == TextTrack::disabledKeyword())
         return;
 
+    RefPtr<TextTrackCue> cue = prpCue;
+
     // Negative duration cues need be treated in the interval tree as
     // zero-length cues.
-    double endTime = std::max(cue->startTime(), cue->endTime());
+    MediaTime endTime = std::max(cue->startMediaTime(), cue->endMediaTime());
 
-    CueInterval interval = m_cueTree.createInterval(cue->startTime(), endTime, cue.get());
+    CueInterval interval = m_cueTree.createInterval(cue->startMediaTime(), endTime, cue.get());
     if (!m_cueTree.contains(interval))
         m_cueTree.add(interval);
-    updateActiveTextTrackCues(currentTime());
+    updateActiveTextTrackCues(currentMediaTime());
 }
 
-void HTMLMediaElement::textTrackRemoveCue(TextTrack*, PassRefPtr<TextTrackCue> cue)
+void HTMLMediaElement::textTrackRemoveCue(TextTrack*, PassRefPtr<TextTrackCue> prpCue)
 {
+    RefPtr<TextTrackCue> cue = prpCue;
     // Negative duration cues need to be treated in the interval tree as
     // zero-length cues.
-    double endTime = std::max(cue->startTime(), cue->endTime());
+    MediaTime endTime = std::max(cue->startMediaTime(), cue->endMediaTime());
 
-    CueInterval interval = m_cueTree.createInterval(cue->startTime(), endTime, cue.get());
+    CueInterval interval = m_cueTree.createInterval(cue->startMediaTime(), endTime, cue.get());
     m_cueTree.remove(interval);
 
 #if ENABLE(WEBVTT_REGIONS)
@@ -1662,7 +1659,7 @@ void HTMLMediaElement::textTrackRemoveCue(TextTrack*, PassRefPtr<TextTrackCue> c
 
     if (cue->isRenderable())
         toVTTCue(cue.get())->removeDisplayTree();
-    updateActiveTextTrackCues(currentTime());
+    updateActiveTextTrackCues(currentMediaTime());
 
 #if ENABLE(WEBVTT_REGIONS)
     if (cue->isRenderable())
@@ -2089,7 +2086,7 @@ void HTMLMediaElement::setReadyState(MediaPlayer::ReadyState state)
     updateMediaController();
 #if ENABLE(VIDEO_TRACK)
     if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled())
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
 #endif
 }
 
@@ -2242,7 +2239,7 @@ void HTMLMediaElement::progressEventTimerFired(Timer<HTMLMediaElement>&)
 void HTMLMediaElement::rewind(double timeDelta)
 {
     LOG(Media, "HTMLMediaElement::rewind(%f)", timeDelta);
-    setCurrentTime(std::max(currentTime() - timeDelta, minTimeSeekable()));
+    setCurrentTime(std::max(currentMediaTime() - MediaTime::createWithDouble(timeDelta), minTimeSeekable()));
 }
 
 void HTMLMediaElement::returnToRealtime()
@@ -2251,12 +2248,12 @@ void HTMLMediaElement::returnToRealtime()
     setCurrentTime(maxTimeSeekable());
 }
 
-void HTMLMediaElement::addPlayedRange(double start, double end)
+void HTMLMediaElement::addPlayedRange(const MediaTime& start, const MediaTime& end)
 {
-    LOG(Media, "HTMLMediaElement::addPlayedRange(%f, %f)", start, end);
+    LOG(Media, "HTMLMediaElement::addPlayedRange(%s, %s)", toString(start).utf8().data(), toString(end).utf8().data());
     if (!m_playedTimeRanges)
         m_playedTimeRanges = TimeRanges::create();
-    m_playedTimeRanges->add(start, end);
+    m_playedTimeRanges->ranges().add(start, end);
 }  
 
 bool HTMLMediaElement::supportsSave() const
@@ -2280,7 +2277,12 @@ void HTMLMediaElement::prepareToPlay()
 
 void HTMLMediaElement::fastSeek(double time)
 {
-    LOG(Media, "HTMLMediaElement::fastSeek(%f)", time);
+    fastSeek(MediaTime::createWithDouble(time));
+}
+
+void HTMLMediaElement::fastSeek(const MediaTime& time)
+{
+    LOG(Media, "HTMLMediaElement::fastSeek(%s)", toString(time).utf8().data());
     // 4.7.10.9 Seeking
     // 9. If the approximate-for-speed flag is set, adjust the new playback position to a value that will
     // allow for playback to resume promptly. If new playback position before this step is before current
@@ -2288,28 +2290,29 @@ void HTMLMediaElement::fastSeek(double time)
     // position. Similarly, if the new playback position before this step is after current playback position,
     // then the adjusted new playback position must also be after the current playback position.
     refreshCachedTime();
-    double delta = time - currentTime();
-    double negativeTolerance = delta >= 0 ? delta : std::numeric_limits<double>::infinity();
-    double positiveTolerance = delta < 0 ? -delta : std::numeric_limits<double>::infinity();
+    MediaTime delta = time - currentMediaTime();
+    MediaTime negativeTolerance = delta >= MediaTime::zeroTime() ? delta : MediaTime::positiveInfiniteTime();
+    MediaTime positiveTolerance = delta < MediaTime::zeroTime() ? -delta : MediaTime::positiveInfiniteTime();
 
     seekWithTolerance(time, negativeTolerance, positiveTolerance, true);
 }
 
-void HTMLMediaElement::seek(double time)
+void HTMLMediaElement::seek(const MediaTime& time)
 {
-    LOG(Media, "HTMLMediaElement::seek(%f)", time);
-    seekWithTolerance(time, 0, 0, true);
+    LOG(Media, "HTMLMediaElement::seek(%s)", toString(time).utf8().data());
+    seekWithTolerance(time, MediaTime::zeroTime(), MediaTime::zeroTime(), true);
 }
 
-void HTMLMediaElement::seekInternal(double time)
+void HTMLMediaElement::seekInternal(const MediaTime& time)
 {
-    LOG(Media, "HTMLMediaElement::seekInternal(%f)", time);
-    seekWithTolerance(time, 0, 0, false);
+    LOG(Media, "HTMLMediaElement::seekInternal(%s)", toString(time).utf8().data());
+    seekWithTolerance(time, MediaTime::zeroTime(), MediaTime::zeroTime(), false);
 }
 
-void HTMLMediaElement::seekWithTolerance(double time, double negativeTolerance, double positiveTolerance, bool fromDOM)
+void HTMLMediaElement::seekWithTolerance(const MediaTime& inTime, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance, bool fromDOM)
 {
     // 4.8.10.9 Seeking
+    MediaTime time = inTime;
 
     // 1 - Set the media element's show poster flag to false.
     setDisplayMode(Video);
@@ -2324,7 +2327,7 @@ void HTMLMediaElement::seekWithTolerance(double time, double negativeTolerance,
 
     // Get the current time before setting m_seeking, m_lastSeekTime is returned once it is set.
     refreshCachedTime();
-    double now = currentTime();
+    MediaTime now = currentMediaTime();
 
     // 3 - If the element's seeking IDL attribute is true, then another instance of this algorithm is
     // already running. Abort that other instance of the algorithm without waiting for the step that
@@ -2360,18 +2363,18 @@ void HTMLMediaElement::seekTimerFired(Timer<HTMLMediaElement>&)
     }
 
     ASSERT(m_pendingSeek);
-    double now = m_pendingSeek->now;
-    double time = m_pendingSeek->targetTime;
-    double negativeTolerance = m_pendingSeek->negativeTolerance;
-    double positiveTolerance = m_pendingSeek->positiveTolerance;
+    MediaTime now = m_pendingSeek->now;
+    MediaTime time = m_pendingSeek->targetTime;
+    MediaTime negativeTolerance = m_pendingSeek->negativeTolerance;
+    MediaTime positiveTolerance = m_pendingSeek->positiveTolerance;
     m_pendingSeek = nullptr;
 
     // 6 - If the new playback position is later than the end of the media resource, then let it be the end 
     // of the media resource instead.
-    time = std::min(time, duration());
+    time = std::min(time, durationMediaTime());
 
     // 7 - If the new playback position is less than the earliest possible position, let it be that position instead.
-    double earliestTime = m_player->startTime();
+    MediaTime earliestTime = m_player->startTime();
     time = std::max(time, earliestTime);
 
     // Ask the media engine for the time value in the movie's time scale before comparing with current time. This
@@ -2380,9 +2383,9 @@ void HTMLMediaElement::seekTimerFired(Timer<HTMLMediaElement>&)
     // not generate a timechanged callback. This means m_seeking will never be cleared and we will never 
     // fire a 'seeked' event.
 #if !LOG_DISABLED
-    double mediaTime = m_player->mediaTimeForTimeValue(time);
+    MediaTime mediaTime = m_player->mediaTimeForTimeValue(time);
     if (time != mediaTime)
-        LOG(Media, "HTMLMediaElement::seekTimerFired(%f) - media timeline equivalent is %f", time, mediaTime);
+        LOG(Media, "HTMLMediaElement::seekTimerFired(%s) - media timeline equivalent is %s", toString(time).utf8().data(), toString(mediaTime).utf8().data());
 #endif
     time = m_player->mediaTimeForTimeValue(time);
 
@@ -2413,7 +2416,7 @@ void HTMLMediaElement::seekTimerFired(Timer<HTMLMediaElement>&)
         m_seeking = false;
         return;
     }
-    time = seekableRanges->nearest(time);
+    time = seekableRanges->ranges().nearest(time);
 
     m_sentEndEvent = false;
     m_lastSeekTime = time;
@@ -2475,7 +2478,7 @@ bool HTMLMediaElement::seeking() const
 void HTMLMediaElement::refreshCachedTime() const
 {
     m_cachedTime = m_player->currentTime();
-    if (!m_cachedTime) { 
+    if (!m_cachedTime) {
         // Do not use m_cachedTime until the media engine returns a non-zero value because we can't 
         // estimate current time until playback actually begins. 
         invalidateCachedTime(); 
@@ -2488,7 +2491,7 @@ void HTMLMediaElement::refreshCachedTime() const
 void HTMLMediaElement::invalidateCachedTime() const
 {
 #if !LOG_DISABLED
-    if (m_cachedTime != MediaPlayer::invalidTime())
+    if (m_cachedTime.isValid())
         LOG(Media, "HTMLMediaElement::invalidateCachedTime");
 #endif
 
@@ -2498,29 +2501,34 @@ void HTMLMediaElement::invalidateCachedTime() const
     static const double minimumTimePlayingBeforeCacheSnapshot = 0.5;
 
     m_minimumClockTimeToUpdateCachedTime = monotonicallyIncreasingTime() + minimumTimePlayingBeforeCacheSnapshot;
-    m_cachedTime = MediaPlayer::invalidTime();
+    m_cachedTime = MediaTime::invalidTime();
 }
 
 // playback state
 double HTMLMediaElement::currentTime() const
 {
+    return currentMediaTime().toDouble();
+}
+
+MediaTime HTMLMediaElement::currentMediaTime() const
+{
 #if LOG_CACHED_TIME_WARNINGS
-    static const double minCachedDeltaForWarning = 0.01;
+    static const MediaTime minCachedDeltaForWarning = MediaTime::create(1, 100);
 #endif
 
     if (!m_player)
-        return 0;
+        return MediaTime::zeroTime();
 
     if (m_seeking) {
-        LOG(Media, "HTMLMediaElement::currentTime - seeking, returning %f", m_lastSeekTime);
+        LOG(Media, "HTMLMediaElement::currentTime - seeking, returning %s", toString(m_lastSeekTime).utf8().data());
         return m_lastSeekTime;
     }
 
-    if (m_cachedTime != MediaPlayer::invalidTime() && m_paused) {
+    if (m_cachedTime.isValid() && m_paused) {
 #if LOG_CACHED_TIME_WARNINGS
-        double delta = m_cachedTime - m_player->currentTime();
+        MediaTime delta = m_cachedTime - m_player->currentTime();
         if (delta > minCachedDeltaForWarning)
-            LOG(Media, "HTMLMediaElement::currentTime - WARNING, cached time is %f seconds off of media time when paused", delta);
+            LOG(Media, "HTMLMediaElement::currentTime - WARNING, cached time is %s seconds off of media time when paused", toString(delta).utf8().data());
 #endif
         return m_cachedTime;
     }
@@ -2529,15 +2537,15 @@ double HTMLMediaElement::currentTime() const
     double now = monotonicallyIncreasingTime();
     double maximumDurationToCacheMediaTime = m_player->maximumDurationToCacheMediaTime();
 
-    if (maximumDurationToCacheMediaTime && m_cachedTime != MediaPlayer::invalidTime() && !m_paused && now > m_minimumClockTimeToUpdateCachedTime) {
+    if (maximumDurationToCacheMediaTime && m_cachedTime.isValid() && !m_paused && now > m_minimumClockTimeToUpdateCachedTime) {
         double clockDelta = now - m_clockTimeAtLastCachedTimeUpdate;
 
         // Not too soon, use the cached time only if it hasn't expired.
         if (clockDelta < maximumDurationToCacheMediaTime) {
-            double adjustedCacheTime = m_cachedTime + (effectivePlaybackRate() * clockDelta);
+            MediaTime adjustedCacheTime = m_cachedTime + MediaTime::createWithDouble(effectivePlaybackRate() * clockDelta);
 
 #if LOG_CACHED_TIME_WARNINGS
-            double delta = adjustedCacheTime - m_player->currentTime();
+            MediaTime delta = adjustedCacheTime - m_player->currentTime();
             if (delta > minCachedDeltaForWarning)
-                LOG(Media, "HTMLMediaElement::currentTime - WARNING, cached time is %f seconds off of media time when playing", delta);
+                LOG(Media, "HTMLMediaElement::currentTime - WARNING, cached time is %s seconds off of media time when playing", toString(delta).utf8().data());
 #endif
@@ -2548,21 +2556,26 @@ double HTMLMediaElement::currentTime() const
 #if LOG_CACHED_TIME_WARNINGS
-    if (maximumDurationToCacheMediaTime && now > m_minimumClockTimeToUpdateCachedTime && m_cachedTime != MediaPlayer::invalidTime()) {
+    if (maximumDurationToCacheMediaTime && now > m_minimumClockTimeToUpdateCachedTime && m_cachedTime.isValid()) {
         double clockDelta = now - m_clockTimeAtLastCachedTimeUpdate;
-        double delta = m_cachedTime + (effectivePlaybackRate() * clockDelta) - m_player->currentTime();
-        LOG(Media, "HTMLMediaElement::currentTime - cached time was %f seconds off of media time when it expired", delta);
+        MediaTime delta = m_cachedTime + MediaTime::createWithDouble(effectivePlaybackRate() * clockDelta) - m_player->currentTime();
+        LOG(Media, "HTMLMediaElement::currentTime - cached time was %s seconds off of media time when it expired", toString(delta).utf8().data());
     }
 #endif
 
     refreshCachedTime();
 
-    if (m_cachedTime == MediaPlayer::invalidTime())
-        return 0;
+    if (m_cachedTime.isInvalid())
+        return MediaTime::zeroTime();
     
     return m_cachedTime;
 }
 
 void HTMLMediaElement::setCurrentTime(double time)
 {
+    setCurrentTime(MediaTime::createWithDouble(time));
+}
+
+void HTMLMediaElement::setCurrentTime(const MediaTime& time)
+{
     if (m_mediaController)
         return;
 
@@ -2578,15 +2591,20 @@ void HTMLMediaElement::setCurrentTime(double time, ExceptionCode& ec)
         return;
     }
 
-    seek(time);
+    seek(MediaTime::createWithDouble(time));
 }
 
 double HTMLMediaElement::duration() const
 {
+    return durationMediaTime().toDouble();
+}
+
+MediaTime HTMLMediaElement::durationMediaTime() const
+{
     if (m_player && m_readyState >= HAVE_METADATA)
         return m_player->duration();
 
-    return std::numeric_limits<double>::quiet_NaN();
+    return MediaTime::invalidTime();
 }
 
 bool HTMLMediaElement::paused() const
@@ -2732,7 +2750,7 @@ void HTMLMediaElement::playInternal()
         scheduleDelayedAction(LoadMediaResource);
 
     if (endedPlayback())
-        seekInternal(0);
+        seekInternal(MediaTime::zeroTime());
 
     if (m_mediaController)
         m_mediaController->bringElementUpToSpeed(this);
@@ -3104,8 +3122,8 @@ void HTMLMediaElement::playbackProgressTimerFired(Timer<HTMLMediaElement>&)
 {
     ASSERT(m_player);
 
-    if (m_fragmentEndTime != MediaPlayer::invalidTime() && currentTime() >= m_fragmentEndTime && effectivePlaybackRate() > 0) {
-        m_fragmentEndTime = MediaPlayer::invalidTime();
+    if (m_fragmentEndTime.isValid() && currentMediaTime() >= m_fragmentEndTime && effectivePlaybackRate() > 0) {
+        m_fragmentEndTime = MediaTime::invalidTime();
         if (!m_mediaController && !m_paused) {
             // changes paused to true and fires a simple event named pause at the media element.
             pauseInternal();
@@ -3122,7 +3140,7 @@ void HTMLMediaElement::playbackProgressTimerFired(Timer<HTMLMediaElement>&)
 
 #if ENABLE(VIDEO_TRACK)
     if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled())
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
 #endif
 
 #if ENABLE(MEDIA_SOURCE)
@@ -3142,7 +3160,7 @@ void HTMLMediaElement::scheduleTimeupdateEvent(bool periodicEvent)
 
     // Some media engines make multiple "time changed" callbacks at the same time, but we only want one
     // event at a given time so filter here
-    double movieTime = currentTime();
+    MediaTime movieTime = currentMediaTime();
     if (movieTime != m_lastTimeUpdateEventMovieTime) {
         scheduleEvent(eventNames().timeupdateEvent);
         m_clockTimeAtLastUpdateEvent = now;
@@ -3159,9 +3177,9 @@ double HTMLMediaElement::percentLoaded() const
 {
     if (!m_player)
         return 0;
-    double duration = m_player->duration();
+    MediaTime duration = m_player->duration();
 
-    if (!duration || std::isinf(duration))
+    if (!duration || duration.isPositiveInfinite() || duration.isNegativeInfinite())
         return 0;
 
     MediaTime buffered = MediaTime::zeroTime();
@@ -3172,7 +3190,7 @@ double HTMLMediaElement::percentLoaded() const
         MediaTime end = timeRanges->end(i, ignored);
         buffered += end - start;
     }
-    return buffered.toDouble() / duration;
+    return buffered.toDouble() / duration.toDouble();
 }
 
 #if ENABLE(VIDEO_TRACK)
@@ -4051,7 +4069,7 @@ void HTMLMediaElement::mediaPlayerTimeChanged(MediaPlayer*)
 
 #if ENABLE(VIDEO_TRACK)
     if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled())
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
 #endif
 
     beginProcessingMediaPlayerCallback();
@@ -4068,20 +4086,20 @@ void HTMLMediaElement::mediaPlayerTimeChanged(MediaPlayer*)
     else
         scheduleTimeupdateEvent(false);
 
-    double now = currentTime();
-    double dur = duration();
+    MediaTime now = currentMediaTime();
+    MediaTime dur = durationMediaTime();
     double playbackRate = effectivePlaybackRate();
     
     // When the current playback position reaches the end of the media resource then the user agent must follow these steps:
-    if (!std::isnan(dur) && dur) {
+    if (dur.isValid() && dur) {
         // If the media element has a loop attribute specified and does not have a current media controller,
         if (loop() && !m_mediaController && playbackRate > 0) {
             m_sentEndEvent = false;
             // then seek to the earliest possible position of the media resource and abort these steps when the direction of
             // playback is forwards,
             if (now >= dur)
-                seekInternal(0);
-        } else if ((now <= 0 && playbackRate < 0) || (now >= dur && playbackRate > 0)) {
+                seekInternal(MediaTime::zeroTime());
+        } else if ((now <= MediaTime::zeroTime() && playbackRate < 0) || (now >= dur && playbackRate > 0)) {
             // If the media element does not have a current media controller, and the media element
             // has still ended playback and paused is false,
             if (!m_mediaController && !m_paused) {
@@ -4148,8 +4166,8 @@ void HTMLMediaElement::mediaPlayerDurationChanged(MediaPlayer* player)
     scheduleEvent(eventNames().durationchangeEvent);
     mediaPlayerCharacteristicChanged(player);
 
-    double now = currentTime();
-    double dur = duration();
+    MediaTime now = currentMediaTime();
+    MediaTime dur = durationMediaTime();
     if (now > dur)
         seekInternal(dur);
 
@@ -4326,7 +4344,7 @@ PassRefPtr<TimeRanges> HTMLMediaElement::buffered() const
 PassRefPtr<TimeRanges> HTMLMediaElement::played()
 {
     if (m_playing) {
-        double time = currentTime();
+        MediaTime time = currentMediaTime();
         if (time > m_lastSeekTime)
             addPlayedRange(m_lastSeekTime, time);
     }
@@ -4378,8 +4396,8 @@ bool HTMLMediaElement::couldPlayIfEnoughData() const
 
 bool HTMLMediaElement::endedPlayback() const
 {
-    double dur = duration();
-    if (!m_player || std::isnan(dur))
+    MediaTime dur = durationMediaTime();
+    if (!m_player || !dur.isValid())
         return false;
 
     // 4.8.10.8 Playing the media resource
@@ -4392,14 +4410,14 @@ bool HTMLMediaElement::endedPlayback() const
     // and the current playback position is the end of the media resource and the direction
     // of playback is forwards, Either the media element does not have a loop attribute specified,
     // or the media element has a current media controller.
-    double now = currentTime();
+    MediaTime now = currentMediaTime();
     if (effectivePlaybackRate() > 0)
-        return dur > 0 && now >= dur && (!loop() || m_mediaController);
+        return dur > MediaTime::zeroTime() && now >= dur && (!loop() || m_mediaController);
 
     // or the current playback position is the earliest possible position and the direction 
     // of playback is backwards
     if (effectivePlaybackRate() < 0)
-        return now <= 0;
+        return now <= MediaTime::zeroTime();
 
     return false;
 }
@@ -4423,14 +4441,14 @@ bool HTMLMediaElement::pausedForUserInteraction() const
     return false;
 }
 
-double HTMLMediaElement::minTimeSeekable() const
+MediaTime HTMLMediaElement::minTimeSeekable() const
 {
-    return m_player ? m_player->minTimeSeekable() : 0;
+    return m_player ? m_player->minTimeSeekable() : MediaTime::zeroTime();
 }
 
-double HTMLMediaElement::maxTimeSeekable() const
+MediaTime HTMLMediaElement::maxTimeSeekable() const
 {
-    return m_player ? m_player->maxTimeSeekable() : 0;
+    return m_player ? m_player->maxTimeSeekable() : MediaTime::zeroTime();
 }
     
 void HTMLMediaElement::updateVolume()
@@ -4522,7 +4540,7 @@ void HTMLMediaElement::updatePlayState()
 
         m_playbackProgressTimer.stop();
         m_playing = false;
-        double time = currentTime();
+        MediaTime time = currentMediaTime();
         if (time > m_lastSeekTime)
             addPlayedRange(m_lastSeekTime, time);
 
@@ -4602,7 +4620,7 @@ void HTMLMediaElement::userCancelledLoad()
     updateMediaController();
 #if ENABLE(VIDEO_TRACK)
     if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled())
-        updateActiveTextTrackCues(0);
+        updateActiveTextTrackCues(MediaTime::zeroTime());
 #endif
 }
 
@@ -5212,7 +5230,7 @@ void HTMLMediaElement::configureTextTrackDisplay(TextTrackVisibilityCheckType ch
     }
 
     if (checkType == CheckTextTrackVisibility && m_haveVisibleTextTrack == haveVisibleTextTrack) {
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
         return;
     }
 
@@ -5236,7 +5254,7 @@ void HTMLMediaElement::configureTextTrackDisplay(TextTrackVisibilityCheckType ch
     
     if (RuntimeEnabledFeatures::sharedFeatures().webkitVideoTrackEnabled()) {
         updateTextTrackDisplay();
-        updateActiveTextTrackCues(currentTime());
+        updateActiveTextTrackCues(currentMediaTime());
     }
 }
 
@@ -5448,31 +5466,31 @@ bool HTMLMediaElement::isBlockedOnMediaController() const
 void HTMLMediaElement::prepareMediaFragmentURI()
 {
     MediaFragmentURIParser fragmentParser(m_currentSrc);
-    double dur = duration();
+    MediaTime dur = durationMediaTime();
     
-    double start = fragmentParser.startTime();
-    if (start != MediaFragmentURIParser::invalidTimeValue() && start > 0) {
+    MediaTime start = fragmentParser.startTime();
+    if (start.isValid() && start > MediaTime::zeroTime()) {
         m_fragmentStartTime = start;
         if (m_fragmentStartTime > dur)
             m_fragmentStartTime = dur;
     } else
-        m_fragmentStartTime = MediaPlayer::invalidTime();
+        m_fragmentStartTime = MediaTime::invalidTime();
     
-    double end = fragmentParser.endTime();
-    if (end != MediaFragmentURIParser::invalidTimeValue() && end > 0 && end > m_fragmentStartTime) {
+    MediaTime end = fragmentParser.endTime();
+    if (end.isValid() && end > MediaTime::zeroTime() && (!m_fragmentStartTime.isValid() || end > m_fragmentStartTime)) {
         m_fragmentEndTime = end;
         if (m_fragmentEndTime > dur)
             m_fragmentEndTime = dur;
     } else
-        m_fragmentEndTime = MediaPlayer::invalidTime();
+        m_fragmentEndTime = MediaTime::invalidTime();
     
-    if (m_fragmentStartTime != MediaPlayer::invalidTime() && m_readyState < HAVE_FUTURE_DATA)
+    if (m_fragmentStartTime.isValid() && m_readyState < HAVE_FUTURE_DATA)
         prepareToPlay();
 }
 
 void HTMLMediaElement::applyMediaFragmentURI()
 {
-    if (m_fragmentStartTime != MediaPlayer::invalidTime()) {
+    if (m_fragmentStartTime.isValid()) {
         m_sentEndEvent = false;
         seek(m_fragmentStartTime);
     }
@@ -5735,7 +5753,7 @@ RefPtr<VideoPlaybackQuality> HTMLMediaElement::getVideoPlaybackQuality()
         m_droppedVideoFrames + m_player->totalVideoFrames(),
         m_droppedVideoFrames + m_player->droppedVideoFrames(),
         m_player->corruptedVideoFrames(),
-        m_player->totalFrameDelay());
+        m_player->totalFrameDelay().toDouble());
 }
 #endif
 
index 3f4a378..675d5f9 100644 (file)
@@ -84,7 +84,7 @@ class TextTrackList;
 class VideoTrackList;
 class VideoTrackPrivate;
 
-typedef PODIntervalTree<double, TextTrackCue*> CueIntervalTree;
+typedef PODIntervalTree<MediaTime, TextTrackCue*> CueIntervalTree;
 typedef CueIntervalTree::IntervalType CueInterval;
 typedef Vector<CueInterval> CueList;
 #endif
@@ -176,6 +176,13 @@ public:
     virtual void setDefaultPlaybackRate(double) override;
     WEBCORE_EXPORT virtual double playbackRate() const override;
     virtual void setPlaybackRate(double) override;
+
+// MediaTime versions of playback state
+    MediaTime currentMediaTime() const;
+    void setCurrentTime(const MediaTime&);
+    MediaTime durationMediaTime() const;
+    void fastSeek(const MediaTime&);
+
     void updatePlaybackRate();
     bool webkitPreservesPitch() const;
     void setWebkitPreservesPitch(bool);
@@ -586,12 +593,12 @@ private:
     void startProgressEventTimer();
     void stopPeriodicTimers();
 
-    void seek(double time);
-    void seekInternal(double time);
-    void seekWithTolerance(double time, double negativeTolerance, double positiveTolerance, bool fromDOM);
+    void seek(const MediaTime&);
+    void seekInternal(const MediaTime&);
+    void seekWithTolerance(const MediaTime&, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance, bool fromDOM);
     void finishSeek();
     void checkIfSeekNeeded();
-    void addPlayedRange(double start, double end);
+    void addPlayedRange(const MediaTime& start, const MediaTime& end);
     
     void scheduleTimeupdateEvent(bool periodicEvent);
     void scheduleEvent(const AtomicString& eventName);
@@ -612,7 +619,7 @@ private:
     URL selectNextSourceChild(ContentType*, String* keySystem, InvalidURLAction);
 
 #if ENABLE(VIDEO_TRACK)
-    void updateActiveTextTrackCues(double);
+    void updateActiveTextTrackCues(const MediaTime&);
     HTMLTrackElement* showingTrackWithSameKind(HTMLTrackElement*) const;
 
     enum ReconfigureMode {
@@ -643,8 +650,8 @@ private:
     bool pausedForUserInteraction() const;
     bool couldPlayIfEnoughData() const;
 
-    double minTimeSeekable() const;
-    double maxTimeSeekable() const;
+    MediaTime minTimeSeekable() const;
+    MediaTime maxTimeSeekable() const;
 
     // Pauses playback without changing any states or generating events
     void setPausedInternal(bool);
@@ -720,23 +727,23 @@ private:
     RefPtr<MediaError> m_error;
 
     struct PendingSeek {
-        PendingSeek(double now, double targetTime, double negativeTolerance, double positiveTolerance)
+        PendingSeek(const MediaTime& now, const MediaTime& targetTime, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
             : now(now)
             , targetTime(targetTime)
             , negativeTolerance(negativeTolerance)
             , positiveTolerance(positiveTolerance)
         {
         }
-        double now;
-        double targetTime;
-        double negativeTolerance;
-        double positiveTolerance;
+        MediaTime now;
+        MediaTime targetTime;
+        MediaTime negativeTolerance;
+        MediaTime positiveTolerance;
     };
     std::unique_ptr<PendingSeek> m_pendingSeek;
 
     double m_volume;
     bool m_volumeInitialized;
-    double m_lastSeekTime;
+    MediaTime m_lastSeekTime;
     
     unsigned m_previousProgress;
     double m_previousProgressTime;
@@ -745,7 +752,7 @@ private:
     double m_clockTimeAtLastUpdateEvent;
 
     // The last time a timeupdate event was sent in movie time.
-    double m_lastTimeUpdateEventMovieTime;
+    MediaTime m_lastTimeUpdateEventMovieTime;
     
     // Loading state.
     enum LoadState { WaitingForSource, LoadingFromSrcAttr, LoadingFromSourceElement };
@@ -774,12 +781,12 @@ private:
     unsigned long m_droppedVideoFrames;
 #endif
 
-    mutable double m_cachedTime;
+    mutable MediaTime m_cachedTime;
     mutable double m_clockTimeAtLastCachedTimeUpdate;
     mutable double m_minimumClockTimeToUpdateCachedTime;
 
-    double m_fragmentStartTime;
-    double m_fragmentEndTime;
+    MediaTime m_fragmentStartTime;
+    MediaTime m_fragmentEndTime;
 
     typedef unsigned PendingActionFlags;
     PendingActionFlags m_pendingActionFlags;
@@ -834,7 +841,7 @@ private:
     bool m_processingPreferenceChange : 1;
 
     String m_subtitleTrackLanguage;
-    float m_lastTextTrackUpdateTime;
+    MediaTime m_lastTextTrackUpdateTime;
 
     CaptionUserPreferences::CaptionDisplayMode m_captionDisplayMode;
 
@@ -897,7 +904,7 @@ struct ValueToString<TextTrackCue*> {
         String text;
         if (cue->isRenderable())
             text = toVTTCue(cue)->text();
-        return String::format("%p id=%s interval=%f-->%f cue=%s)", cue, cue->id().utf8().data(), cue->startTime(), cue->endTime(), text.utf8().data());
+        return String::format("%p id=%s interval=%s-->%s cue=%s)", cue, cue->id().utf8().data(), toString(cue->startTime()).utf8().data(), toString(cue->endTime()).utf8().data(), text.utf8().data());
     }
 };
 #endif
@@ -910,6 +917,16 @@ template <> inline bool isElementOfType<const HTMLMediaElement>(const Element& e
 
 NODE_TYPE_CASTS(HTMLMediaElement)
 
+#ifndef NDEBUG
+template<>
+struct ValueToString<MediaTime> {
+    static String string(const MediaTime& time)
+    {
+        return toString(time);
+    }
+};
+#endif
+
 } //namespace
 
 #endif
index b3d228c..3a77682 100644 (file)
@@ -171,7 +171,7 @@ void MediaController::setCurrentTime(double time)
     
     // Seek each slaved media element to the new playback position relative to the media element timeline.
     for (size_t index = 0; index < m_mediaElements.size(); ++index)
-        m_mediaElements[index]->seek(time);
+        m_mediaElements[index]->seek(MediaTime::createWithDouble(time));
 
     scheduleTimeupdateEvent();
 }
@@ -479,7 +479,7 @@ void MediaController::bringElementUpToSpeed(HTMLMediaElement* element)
     // When the user agent is to bring a media element up to speed with its new media controller,
     // it must seek that media element to the MediaController's media controller position relative
     // to the media element's timeline.
-    element->seekInternal(currentTime());
+    element->seekInternal(MediaTime::createWithDouble(currentTime()));
 }
 
 bool MediaController::isBlocked() const
index 948d223..790f509 100644 (file)
@@ -70,32 +70,27 @@ static String collectFraction(const LChar* input, unsigned length, unsigned& pos
     return digits.toString();
 }
 
-double MediaFragmentURIParser::invalidTimeValue()
-{
-    return MediaPlayer::invalidTime();
-}
-
 MediaFragmentURIParser::MediaFragmentURIParser(const URL& url)
     : m_url(url)
     , m_timeFormat(None)
-    , m_startTime(MediaPlayer::invalidTime())
-    , m_endTime(MediaPlayer::invalidTime())
+    , m_startTime(MediaTime::invalidTime())
+    , m_endTime(MediaTime::invalidTime())
 {
 }
 
-double MediaFragmentURIParser::startTime()
+MediaTime MediaFragmentURIParser::startTime()
 {
     if (!m_url.isValid())
-        return MediaPlayer::invalidTime();
+        return MediaTime::invalidTime();
     if (m_timeFormat == None)
         parseTimeFragment();
     return m_startTime;
 }
 
-double MediaFragmentURIParser::endTime()
+MediaTime MediaFragmentURIParser::endTime()
 {
     if (!m_url.isValid())
-        return MediaPlayer::invalidTime();
+        return MediaTime::invalidTime();
     if (m_timeFormat == None)
         parseTimeFragment();
     return m_endTime;
@@ -184,8 +179,8 @@ void MediaFragmentURIParser::parseTimeFragment()
         // in the same format. The format is specified by name, followed by a colon (:), with npt: being
         // the default.
         
-        double start = MediaPlayer::invalidTime();
-        double end = MediaPlayer::invalidTime();
+        MediaTime start = MediaTime::invalidTime();
+        MediaTime end = MediaTime::invalidTime();
         if (parseNPTFragment(fragment.second.characters8(), fragment.second.length(), start, end)) {
             m_startTime = start;
             m_endTime = end;
@@ -202,7 +197,7 @@ void MediaFragmentURIParser::parseTimeFragment()
     m_fragments.clear();
 }
 
-bool MediaFragmentURIParser::parseNPTFragment(const LChar* timeString, unsigned length, double& startTime, double& endTime)
+bool MediaFragmentURIParser::parseNPTFragment(const LChar* timeString, unsigned length, MediaTime& startTime, MediaTime& endTime)
 {
     unsigned offset = 0;
     if (length >= nptIdentiferLength && timeString[0] == 'n' && timeString[1] == 'p' && timeString[2] == 't' && timeString[3] == ':')
@@ -215,7 +210,7 @@ bool MediaFragmentURIParser::parseNPTFragment(const LChar* timeString, unsigned
     // If a single number only is given, this corresponds to the begin time except if it is preceded
     // by a comma that would in this case indicate the end time.
     if (timeString[offset] == ',')
-        startTime = 0;
+        startTime = MediaTime::zeroTime();
     else {
         if (!parseNPTTime(timeString, length, offset, startTime))
             return false;
@@ -241,7 +236,7 @@ bool MediaFragmentURIParser::parseNPTFragment(const LChar* timeString, unsigned
     return true;
 }
 
-bool MediaFragmentURIParser::parseNPTTime(const LChar* timeString, unsigned length, unsigned& offset, double& time)
+bool MediaFragmentURIParser::parseNPTTime(const LChar* timeString, unsigned length, unsigned& offset, MediaTime& time)
 {
     enum Mode { minutes, hours };
     Mode mode = minutes;
@@ -271,17 +266,17 @@ bool MediaFragmentURIParser::parseNPTTime(const LChar* timeString, unsigned leng
     String digits1 = collectDigits(timeString, length, offset);
     int value1 = digits1.toInt();
     if (offset >= length || timeString[offset] == ',') {
-        time = value1;
+        time = MediaTime::createWithDouble(value1);
         return true;
     }
 
-    double fraction = 0;
+    MediaTime fraction;
     if (timeString[offset] == '.') {
         if (offset == length)
             return true;
         String digits = collectFraction(timeString, length, offset);
-        fraction = digits.toDouble();
-        time = value1 + fraction;
+        fraction = MediaTime::createWithDouble(digits.toDouble());
+        time = MediaTime::createWithDouble(value1) + fraction;
         return true;
     }
     
@@ -318,9 +313,9 @@ bool MediaFragmentURIParser::parseNPTTime(const LChar* timeString, unsigned leng
     }
 
     if (offset < length && timeString[offset] == '.')
-        fraction = collectFraction(timeString, length, offset).toDouble();
+        fraction = MediaTime::createWithDouble(collectFraction(timeString, length, offset).toDouble());
     
-    time = (value1 * secondsPerHour) + (value2 * secondsPerMinute) + value3 + fraction;
+    time = MediaTime::createWithDouble((value1 * secondsPerHour) + (value2 * secondsPerMinute) + value3) + fraction;
     return true;
 }
 
index e3b1deb..06aceb9 100644 (file)
@@ -29,6 +29,7 @@
 #if ENABLE(VIDEO)
 
 #include "URL.h"
+#include <wtf/MediaTime.h>
 #include <wtf/Vector.h>
 
 namespace WebCore {
@@ -40,10 +41,8 @@ public:
     
     MediaFragmentURIParser(const URL&);
 
-    double startTime();
-    double endTime();
-
-    static double invalidTimeValue();
+    MediaTime startTime();
+    MediaTime endTime();
 
 private:
 
@@ -51,13 +50,13 @@ private:
     
     enum TimeFormat { None, Invalid, NormalPlayTime, SMPTETimeCode, WallClockTimeCode };
     void parseTimeFragment();
-    bool parseNPTFragment(const LChar*, unsigned length, double& startTime, double& endTime);
-    bool parseNPTTime(const LChar*, unsigned length, unsigned& offset, double& time);
+    bool parseNPTFragment(const LChar*, unsigned length, MediaTime& startTime, MediaTime& endTime);
+    bool parseNPTTime(const LChar*, unsigned length, unsigned& offset, MediaTime&);
 
     URL m_url;
     TimeFormat m_timeFormat;
-    double m_startTime;
-    double m_endTime;
+    MediaTime m_startTime;
+    MediaTime m_endTime;
     Vector<std::pair<String, String>> m_fragments;
 };
 
index b43e251..a2eb829 100644 (file)
@@ -60,6 +60,7 @@ public:
     double totalDuration() const;
 
     const PlatformTimeRanges& ranges() const { return m_ranges; }
+    PlatformTimeRanges& ranges() { return m_ranges; }
 
 private:
     WEBCORE_EXPORT explicit TimeRanges();
index 0a0be50..fbf8f3c 100644 (file)
 
 namespace WebCore {
 
-DataCue::DataCue(ScriptExecutionContext& context, double start, double end, ArrayBuffer* data, const String& type, ExceptionCode& ec)
+DataCue::DataCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, ArrayBuffer* data, const String& type, ExceptionCode& ec)
     : TextTrackCue(context, start, end)
     , m_type(type)
 {
     setData(data, ec);
 }
 
-DataCue::DataCue(ScriptExecutionContext& context, double start, double end, const void* data, unsigned length)
+DataCue::DataCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const void* data, unsigned length)
     : TextTrackCue(context, start, end)
 {
     m_data = ArrayBuffer::create(data, length);
 }
 
 #if ENABLE(DATACUE_VALUE)
-DataCue::DataCue(ScriptExecutionContext& context, double start, double end, PassRefPtr<SerializedPlatformRepresentation> platformValue, const String& type)
+DataCue::DataCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation> platformValue, const String& type)
     : TextTrackCue(context, start, end)
     , m_type(type)
     , m_platformValue(platformValue)
 {
 }
 
-DataCue::DataCue(ScriptExecutionContext& context, double start, double end, JSC::JSValue value, const String& type)
+DataCue::DataCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, JSC::JSValue value, const String& type)
     : TextTrackCue(context, start, end)
     , m_type(type)
     , m_value(value)
index ed4cc03..2b67287 100644 (file)
@@ -32,6 +32,7 @@
 #include "TextTrackCue.h"
 #include <runtime/ArrayBuffer.h>
 #include <runtime/JSCInlines.h>
+#include <wtf/MediaTime.h>
 #include <wtf/RefCounted.h>
 
 #if ENABLE(DATACUE_VALUE)
@@ -44,28 +45,28 @@ class ScriptExecutionContext;
 
 class DataCue : public TextTrackCue {
 public:
-    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, double start, double end, ArrayBuffer* data, ExceptionCode& ec)
+    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, ArrayBuffer* data, ExceptionCode& ec)
     {
         return adoptRef(new DataCue(context, start, end, data, emptyString(), ec));
     }
 
-    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, double start, double end, const void* data, unsigned length)
+    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const void* data, unsigned length)
     {
         return adoptRef(new DataCue(context, start, end, data, length));
     }
 
-    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, double start, double end, ArrayBuffer* data, const String& type, ExceptionCode& ec)
+    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, ArrayBuffer* data, const String& type, ExceptionCode& ec)
     {
         return adoptRef(new DataCue(context, start, end, data, type, ec));
     }
 
 #if ENABLE(DATACUE_VALUE)
-    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, double start, double end, PassRefPtr<SerializedPlatformRepresentation> platformValue, const String& type)
+    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation> platformValue, const String& type)
     {
         return adoptRef(new DataCue(context, start, end, platformValue, type));
     }
 
-    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, double start, double end, JSC::JSValue value, const String& type)
+    static PassRefPtr<DataCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, JSC::JSValue value, const String& type)
     {
         return adoptRef(new DataCue(context, start, end, value, type));
     }
@@ -94,11 +95,11 @@ public:
     virtual bool doesExtendCue(const TextTrackCue&) const override;
 
 protected:
-    DataCue(ScriptExecutionContext&, double start, double end, ArrayBuffer*, const String&, ExceptionCode&);
-    DataCue(ScriptExecutionContext&, double start, double end, const void*, unsigned);
+    DataCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, ArrayBuffer*, const String&, ExceptionCode&);
+    DataCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, const void*, unsigned);
 #if ENABLE(DATACUE_VALUE)
-    DataCue(ScriptExecutionContext&, double start, double end, PassRefPtr<SerializedPlatformRepresentation>, const String&);
-    DataCue(ScriptExecutionContext&, double start, double end, JSC::JSValue, const String&);
+    DataCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>, const String&);
+    DataCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, JSC::JSValue, const String&);
 #endif
 
 private:
index f32c6fe..594c0f2 100644 (file)
@@ -52,14 +52,14 @@ InbandDataTextTrack::~InbandDataTextTrack()
 {
 }
 
-void InbandDataTextTrack::addDataCue(InbandTextTrackPrivate*, double start, double end, const void* data, unsigned length)
+void InbandDataTextTrack::addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, const void* data, unsigned length)
 {
     RefPtr<DataCue> cue = DataCue::create(*scriptExecutionContext(), start, end, data, length);
     addCue(cue.release(), ASSERT_NO_EXCEPTION);
 }
 
 #if ENABLE(DATACUE_VALUE)
-void InbandDataTextTrack::addDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue, const String& type)
+void InbandDataTextTrack::addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue, const String& type)
 {
     RefPtr<SerializedPlatformRepresentation> platformValue = prpPlatformValue;
     if (m_incompleteCueMap.find(platformValue.get()) != m_incompleteCueMap.end())
@@ -67,20 +67,22 @@ void InbandDataTextTrack::addDataCue(InbandTextTrackPrivate*, double start, doub
 
     RefPtr<DataCue> cue = DataCue::create(*scriptExecutionContext(), start, end, platformValue, type);
     if (hasCue(cue.get(), TextTrackCue::IgnoreDuration)) {
-        LOG(Media, "InbandDataTextTrack::addDataCue ignoring already added cue: start=%.2f, end=%.2f\n", cue->startTime(), cue->endTime());
+        LOG(Media, "InbandDataTextTrack::addDataCue ignoring already added cue: start=%s, end=%s\n", toString(cue->startTime()).utf8().data(), toString(cue->endTime()).utf8().data());
         return;
     }
 
-    if (std::isinf(end) && mediaElement()) {
-        cue->setEndTime(mediaElement()->duration(), IGNORE_EXCEPTION);
+    if (end.isPositiveInfinite() && mediaElement()) {
+        cue->setEndTime(mediaElement()->durationMediaTime());
         m_incompleteCueMap.add(platformValue, cue);
     }
 
     addCue(cue.release(), ASSERT_NO_EXCEPTION);
 }
 
-void InbandDataTextTrack::updateDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue)
+void InbandDataTextTrack::updateDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& inEnd, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue)
 {
+    MediaTime end = inEnd;
+
     RefPtr<SerializedPlatformRepresentation> platformValue = prpPlatformValue;
     auto iter = m_incompleteCueMap.find(platformValue.get());
     if (iter == m_incompleteCueMap.end())
@@ -92,20 +94,20 @@ void InbandDataTextTrack::updateDataCue(InbandTextTrackPrivate*, double start, d
 
     cue->willChange();
 
-    if (std::isinf(end) && mediaElement())
-        end = mediaElement()->duration();
+    if (end.isPositiveInfinite() && mediaElement())
+        end = mediaElement()->durationMediaTime();
     else
         m_incompleteCueMap.remove(platformValue.get());
 
-    LOG(Media, "InbandDataTextTrack::updateDataCue: was start=%.2f, end=%.2f, will be start=%.2f, end=%.2f\n", cue->startTime(), cue->endTime(), start, end);
+    LOG(Media, "InbandDataTextTrack::updateDataCue: was start=%s, end=%s, will be start=%s, end=%s\n", toString(cue->startTime()).utf8().data(), toString(cue->endTime()).utf8().data(), toString(start).utf8().data(), toString(end).utf8().data());
 
-    cue->setStartTime(start, IGNORE_EXCEPTION);
-    cue->setEndTime(end, IGNORE_EXCEPTION);
+    cue->setStartTime(start);
+    cue->setEndTime(end);
 
     cue->didChange();
 }
 
-void InbandDataTextTrack::removeDataCue(InbandTextTrackPrivate*, double, double, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue)
+void InbandDataTextTrack::removeDataCue(InbandTextTrackPrivate*, const MediaTime&, const MediaTime&, PassRefPtr<SerializedPlatformRepresentation> prpPlatformValue)
 {
     RefPtr<SerializedPlatformRepresentation> platformValue = prpPlatformValue;
     auto iter = m_incompleteCueMap.find(platformValue.get());
@@ -114,7 +116,7 @@ void InbandDataTextTrack::removeDataCue(InbandTextTrackPrivate*, double, double,
 
     RefPtr<DataCue> cue = iter->value;
     if (cue) {
-        LOG(Media, "InbandDataTextTrack::removeDataCue removing cue: start=%.2f, end=%.2f\n", cue->startTime(), cue->endTime());
+        LOG(Media, "InbandDataTextTrack::removeDataCue removing cue: start=%s, end=%s\n", toString(cue->startTime()).utf8().data(), toString(cue->endTime()).utf8().data());
         removeCue(cue.get(), IGNORE_EXCEPTION);
     }
 }
index bb8a1ed..1166e4b 100644 (file)
@@ -50,12 +50,12 @@ public:
 private:
     InbandDataTextTrack(ScriptExecutionContext*, TextTrackClient*, PassRefPtr<InbandTextTrackPrivate>);
 
-    virtual void addDataCue(InbandTextTrackPrivate*, double start, double end, const void*, unsigned) override;
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, const void*, unsigned) override;
 
 #if ENABLE(DATACUE_VALUE)
-    virtual void addDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>, const String&) override;
-    virtual void updateDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>) override;
-    virtual void removeDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>) override;
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>, const String&) override;
+    virtual void updateDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>) override;
+    virtual void removeDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>) override;
     virtual void removeCue(TextTrackCue*, ExceptionCode&) override;
 
     HashMap<RefPtr<SerializedPlatformRepresentation>, RefPtr<DataCue>> m_incompleteCueMap;
index 29eb952..624999c 100644 (file)
@@ -110,11 +110,11 @@ void InbandGenericTextTrack::updateCueFromCueData(TextTrackCueGeneric* cue, Gene
 {
     cue->willChange();
 
-    cue->setStartTime(cueData->startTime(), IGNORE_EXCEPTION);
-    double endTime = cueData->endTime();
-    if (std::isinf(endTime) && mediaElement())
-        endTime = mediaElement()->duration();
-    cue->setEndTime(endTime, IGNORE_EXCEPTION);
+    cue->setStartTime(cueData->startTime());
+    MediaTime endTime = cueData->endTime();
+    if (endTime.isPositiveInfinite() && mediaElement())
+        endTime = mediaElement()->durationMediaTime();
+    cue->setEndTime(endTime);
     cue->setText(cueData->content());
     cue->setId(cueData->id());
     cue->setBaseFontSizeRelativeToVideoHeight(cueData->baseFontSize());
@@ -156,11 +156,11 @@ void InbandGenericTextTrack::addGenericCue(InbandTextTrackPrivate* trackPrivate,
     RefPtr<TextTrackCueGeneric> cue = TextTrackCueGeneric::create(*scriptExecutionContext(), cueData->startTime(), cueData->endTime(), cueData->content());
     updateCueFromCueData(cue.get(), cueData.get());
     if (hasCue(cue.get(), TextTrackCue::IgnoreDuration)) {
-        LOG(Media, "InbandGenericTextTrack::addGenericCue ignoring already added cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime(), cueData->endTime(), cueData->content().utf8().data());
+        LOG(Media, "InbandGenericTextTrack::addGenericCue ignoring already added cue: start=%s, end=%s, content=\"%s\"\n", toString(cueData->startTime()).utf8().data(), toString(cueData->endTime()).utf8().data(), cueData->content().utf8().data());
         return;
     }
 
-    LOG(Media, "InbandGenericTextTrack::addGenericCue added cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime(), cueData->endTime(), cueData->content().utf8().data());
+    LOG(Media, "InbandGenericTextTrack::addGenericCue added cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime().toDouble(), cueData->endTime().toDouble(), cueData->content().utf8().data());
 
     if (cueData->status() != GenericCueData::Complete)
         m_cueMap.add(cueData.get(), cue.get());
@@ -184,10 +184,10 @@ void InbandGenericTextTrack::removeGenericCue(InbandTextTrackPrivate*, GenericCu
 {
     RefPtr<TextTrackCueGeneric> cue = m_cueMap.find(cueData);
     if (cue) {
-        LOG(Media, "InbandGenericTextTrack::removeGenericCue removing cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime(), cueData->endTime(), cueData->content().utf8().data());
+        LOG(Media, "InbandGenericTextTrack::removeGenericCue removing cue: start=%s, end=%s, content=\"%s\"\n",  toString(cueData->startTime()).utf8().data(), toString(cueData->endTime()).utf8().data(), cueData->content().utf8().data());
         removeCue(cue.get(), IGNORE_EXCEPTION);
     } else {
-        LOG(Media, "InbandGenericTextTrack::removeGenericCue UNABLE to find cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime(), cueData->endTime(), cueData->content().utf8().data());
+        LOG(Media, "InbandGenericTextTrack::removeGenericCue UNABLE to find cue: start=%.2f, end=%.2f, content=\"%s\"\n", cueData->startTime().toDouble(), cueData->endTime().toDouble(), cueData->content().utf8().data());
         m_cueMap.remove(cueData);
     }
 }
index 03ecf4e..875e7d8 100644 (file)
@@ -213,10 +213,10 @@ void InbandTextTrack::updateKindFromPrivate()
     }
 }
 
-double InbandTextTrack::startTimeVariance() const
+MediaTime InbandTextTrack::startTimeVariance() const
 {
     if (!m_private)
-        return false;
+        return MediaTime::zeroTime();
     
     return m_private->startTimeVariance();
 }
index bb2404d..0e5b51a 100644 (file)
@@ -69,12 +69,12 @@ private:
     virtual void languageChanged(TrackPrivateBase*, const AtomicString&) override;
     virtual void willRemove(TrackPrivateBase*) override;
 
-    virtual void addDataCue(InbandTextTrackPrivate*, double, double, const void*, unsigned) override { ASSERT_NOT_REACHED(); }
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime&, const MediaTime&, const void*, unsigned) override { ASSERT_NOT_REACHED(); }
 
 #if ENABLE(DATACUE_VALUE)
-    virtual void addDataCue(InbandTextTrackPrivate*, double, double, PassRefPtr<SerializedPlatformRepresentation>, const String&) override { ASSERT_NOT_REACHED(); }
-    virtual void updateDataCue(InbandTextTrackPrivate*, double, double, PassRefPtr<SerializedPlatformRepresentation>) override  { ASSERT_NOT_REACHED(); }
-    virtual void removeDataCue(InbandTextTrackPrivate*, double, double, PassRefPtr<SerializedPlatformRepresentation>) override  { ASSERT_NOT_REACHED(); }
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime&, const MediaTime&, PassRefPtr<SerializedPlatformRepresentation>, const String&) override { ASSERT_NOT_REACHED(); }
+    virtual void updateDataCue(InbandTextTrackPrivate*, const MediaTime&, const MediaTime&, PassRefPtr<SerializedPlatformRepresentation>) override  { ASSERT_NOT_REACHED(); }
+    virtual void removeDataCue(InbandTextTrackPrivate*, const MediaTime&, const MediaTime&, PassRefPtr<SerializedPlatformRepresentation>) override  { ASSERT_NOT_REACHED(); }
 #endif
 
     virtual void addGenericCue(InbandTextTrackPrivate*, PassRefPtr<GenericCueData>) override { ASSERT_NOT_REACHED(); }
@@ -85,7 +85,7 @@ private:
     virtual void parseWebVTTCueData(InbandTextTrackPrivate*, const char*, unsigned) override { ASSERT_NOT_REACHED(); }
     virtual void parseWebVTTCueData(InbandTextTrackPrivate*, const ISOWebVTTCue&) override { ASSERT_NOT_REACHED(); }
 
-    virtual double startTimeVariance() const;
+    virtual MediaTime startTimeVariance() const;
 
 #if USE(PLATFORM_TEXT_TRACK_MENU)
     virtual InbandTextTrackPrivate* privateTrack() override { return m_private.get(); }
index 2df357c..bcccdca 100644 (file)
@@ -294,7 +294,7 @@ void TextTrack::addCue(PassRefPtr<TextTrackCue> prpCue, ExceptionCode& ec)
     }
 
     // TODO(93143): Add spec-compliant behavior for negative time values.
-    if (std::isnan(cue->startTime()) || std::isnan(cue->endTime()) || cue->startTime() < 0 || cue->endTime() < 0)
+    if (!cue->startMediaTime().isValid() || !cue->endMediaTime().isValid() || cue->startMediaTime() < MediaTime::zeroTime() || cue->endMediaTime() < MediaTime::zeroTime())
         return;
 
     // 4.8.10.12.5 Text track API
@@ -485,7 +485,7 @@ int TextTrack::trackIndexRelativeToRenderedTracks()
 
 bool TextTrack::hasCue(TextTrackCue* cue, TextTrackCue::CueMatchRules match)
 {
-    if (cue->startTime() < 0 || cue->endTime() < 0)
+    if (cue->startMediaTime() < MediaTime::zeroTime() || cue->endMediaTime() < MediaTime::zeroTime())
         return false;
     
     if (!m_cues || !m_cues->length())
@@ -522,7 +522,7 @@ bool TextTrack::hasCue(TextTrackCue* cue, TextTrackCue::CueMatchRules match)
                 if (!existingCue)
                     return false;
 
-                if (cue->startTime() > (existingCue->startTime() + startTimeVariance()))
+                if (cue->startMediaTime() > (existingCue->startMediaTime() + startTimeVariance()))
                     return false;
 
                 if (existingCue->isEqual(*cue, match))
@@ -532,7 +532,7 @@ bool TextTrack::hasCue(TextTrackCue* cue, TextTrackCue::CueMatchRules match)
         
         size_t index = (searchStart + searchEnd) / 2;
         existingCue = m_cues->item(index);
-        if ((cue->startTime() + startTimeVariance()) < existingCue->startTime() || (match != TextTrackCue::IgnoreDuration && cue->hasEquivalentStartTime(*existingCue) && cue->endTime() > existingCue->endTime()))
+        if ((cue->startMediaTime() + startTimeVariance()) < existingCue->startMediaTime() || (match != TextTrackCue::IgnoreDuration && cue->hasEquivalentStartTime(*existingCue) && cue->endMediaTime() > existingCue->endMediaTime()))
             searchEnd = index;
         else
             searchStart = index + 1;
index 9a26445..b9cd97f 100644 (file)
@@ -158,7 +158,7 @@ public:
 
     virtual bool isInband() const { return false; }
 
-    virtual double startTimeVariance() const { return 0; }
+    virtual MediaTime startTimeVariance() const { return MediaTime::zeroTime(); }
 
     using RefCounted<TrackBase>::ref;
     using RefCounted<TrackBase>::deref;
index 5609e04..af2cf26 100644 (file)
@@ -60,10 +60,15 @@ static const int invalidCueIndex = -1;
 
 PassRefPtr<TextTrackCue> TextTrackCue::create(ScriptExecutionContext& context, double start, double end, const String& content)
 {
+    return create(context, MediaTime::createWithDouble(start), MediaTime::createWithDouble(end), content);
+}
+
+PassRefPtr<TextTrackCue> TextTrackCue::create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const String& content)
+{
     return VTTCue::create(context, start, end, content);
 }
 
-TextTrackCue::TextTrackCue(ScriptExecutionContext& context, double start, double end)
+TextTrackCue::TextTrackCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end)
     : m_startTime(start)
     , m_endTime(end)
     , m_cueIndex(invalidCueIndex)
@@ -128,9 +133,14 @@ void TextTrackCue::setStartTime(double value, ExceptionCode& ec)
     }
     
     // TODO(93143): Add spec-compliant behavior for negative time values.
-    if (m_startTime == value || value < 0)
+    if (m_startTime.toDouble() == value || value < 0)
         return;
 
+    setStartTime(MediaTime::createWithDouble(value));
+}
+
+void TextTrackCue::setStartTime(const MediaTime& value)
+{
     willChange();
     m_startTime = value;
     didChange();
@@ -145,9 +155,14 @@ void TextTrackCue::setEndTime(double value, ExceptionCode& ec)
     }
 
     // TODO(93143): Add spec-compliant behavior for negative time values.
-    if (m_endTime == value || value < 0)
+    if (m_endTime.toDouble() == value || value < 0)
         return;
 
+    setEndTime(MediaTime::createWithDouble(value));
+}
+
+void TextTrackCue::setEndTime(const MediaTime& value)
+{
     willChange();
     m_endTime = value;
     didChange();
@@ -199,7 +214,7 @@ void TextTrackCue::setIsActive(bool active)
 
 bool TextTrackCue::isOrderedBefore(const TextTrackCue* other) const
 {
-    return startTime() < other->startTime() || (startTime() == other->startTime() && endTime() > other->endTime());
+    return startMediaTime() < other->startMediaTime() || (startMediaTime() == other->startMediaTime() && endMediaTime() > other->endMediaTime());
 }
 
 bool TextTrackCue::cueContentsMatch(const TextTrackCue& cue) const
@@ -218,7 +233,7 @@ bool TextTrackCue::isEqual(const TextTrackCue& cue, TextTrackCue::CueMatchRules
     if (cueType() != cue.cueType())
         return false;
 
-    if (match != IgnoreDuration && endTime() != cue.endTime())
+    if (match != IgnoreDuration && endMediaTime() != cue.endMediaTime())
         return false;
     if (!hasEquivalentStartTime(cue))
         return false;
@@ -230,13 +245,13 @@ bool TextTrackCue::isEqual(const TextTrackCue& cue, TextTrackCue::CueMatchRules
 
 bool TextTrackCue::hasEquivalentStartTime(const TextTrackCue& cue) const
 {
-    double startTimeVariance = 0;
+    MediaTime startTimeVariance = MediaTime::zeroTime();
     if (track())
         startTimeVariance = track()->startTimeVariance();
     else if (cue.track())
         startTimeVariance = cue.track()->startTimeVariance();
 
-    return std::abs(std::abs(startTime()) - std::abs(cue.startTime())) <= startTimeVariance;
+    return abs(abs(startMediaTime()) - abs(cue.startMediaTime())) <= startTimeVariance;
 }
 
 bool TextTrackCue::doesExtendCue(const TextTrackCue& cue) const
@@ -244,7 +259,7 @@ bool TextTrackCue::doesExtendCue(const TextTrackCue& cue) const
     if (!cueContentsMatch(cue))
         return false;
 
-    if (endTime() != cue.startTime())
+    if (endMediaTime() != cue.startMediaTime())
         return false;
     
     return true;
index 1e3421d..f0ee66b 100644 (file)
@@ -36,6 +36,7 @@
 
 #include "EventTarget.h"
 #include "HTMLElement.h"
+#include <wtf/MediaTime.h>
 #include <wtf/RefCounted.h>
 
 namespace WebCore {
@@ -45,6 +46,7 @@ class TextTrack;
 class TextTrackCue : public RefCounted<TextTrackCue>, public EventTargetWithInlineData {
 public:
     static PassRefPtr<TextTrackCue> create(ScriptExecutionContext&, double start, double end, const String& content);
+    static PassRefPtr<TextTrackCue> create(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, const String& content);
 
     static const AtomicString& cueShadowPseudoId()
     {
@@ -60,10 +62,14 @@ public:
     const String& id() const { return m_id; }
     void setId(const String&);
 
-    double startTime() const { return m_startTime; }
+    MediaTime startMediaTime() const { return m_startTime; }
+    double startTime() const { return startMediaTime().toDouble(); }
+    void setStartTime(const MediaTime&);
     void setStartTime(double, ExceptionCode&);
 
-    double endTime() const { return m_endTime; }
+    MediaTime endMediaTime() const { return m_endTime; }
+    double endTime() const { return endMediaTime().toDouble(); }
+    void setEndTime(const MediaTime&);
     void setEndTime(double, ExceptionCode&);
 
     bool pauseOnExit() const { return m_pauseOnExit; }
@@ -112,7 +118,7 @@ public:
     using RefCounted<TextTrackCue>::deref;
 
 protected:
-    TextTrackCue(ScriptExecutionContext&, double start, double end);
+    TextTrackCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end);
 
     Document& ownerDocument() { return toDocument(m_scriptExecutionContext); }
 
@@ -122,8 +128,8 @@ private:
     virtual void derefEventTarget() override final { deref(); }
 
     String m_id;
-    double m_startTime;
-    double m_endTime;
+    MediaTime m_startTime;
+    MediaTime m_endTime;
     int m_cueIndex;
     int m_processingCueChanges;
 
index 75d1b04..9e89adb 100644 (file)
@@ -144,7 +144,7 @@ void TextTrackCueGenericBoxElement::applyCSSProperties(const IntSize& videoSize)
     setInlineStyleProperty(CSSPropertyWhiteSpace, CSSValuePreWrap);
 }
 
-TextTrackCueGeneric::TextTrackCueGeneric(ScriptExecutionContext& context, double start, double end, const String& content)
+TextTrackCueGeneric::TextTrackCueGeneric(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const String& content)
     : VTTCue(context, start, end, content)
     , m_baseFontSizeRelativeToVideoHeight(0)
     , m_fontSizeMultiplier(0)
index 6db791b..0db8a40 100644 (file)
@@ -38,7 +38,7 @@ class GenericCueData;
 // A "generic" cue is a non-WebVTT cue, so it is not positioned/sized with the WebVTT logic.
 class TextTrackCueGeneric final : public VTTCue {
 public:
-    static PassRefPtr<TextTrackCueGeneric> create(ScriptExecutionContext& context, double start, double end, const String& content)
+    static PassRefPtr<TextTrackCueGeneric> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const String& content)
     {
         return adoptRef(new TextTrackCueGeneric(context, start, end, content));
     }
@@ -82,7 +82,7 @@ private:
     virtual bool isOrderedBefore(const TextTrackCue*) const override;
     virtual bool isPositionedAbove(const TextTrackCue*) const override;
 
-    TextTrackCueGeneric(ScriptExecutionContext&, double start, double end, const String&);
+    TextTrackCueGeneric(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, const String&);
     
     Color m_foregroundColor;
     Color m_backgroundColor;
index a084270..e8566c2 100644 (file)
@@ -77,8 +77,8 @@ TextTrackCueList* TextTrackCueList::activeCues()
 
 bool TextTrackCueList::add(PassRefPtr<TextTrackCue> cue)
 {
-    ASSERT(cue->startTime() >= 0);
-    ASSERT(cue->endTime() >= 0);
+    ASSERT(cue->startMediaTime() >= MediaTime::zeroTime());
+    ASSERT(cue->endMediaTime() >= MediaTime::zeroTime());
 
     return add(cue, 0, m_list.size());
 }
index b1cc58a..824c46e 100644 (file)
@@ -247,17 +247,12 @@ const AtomicString& VTTCue::cueBackdropShadowPseudoId()
     return cueBackdropShadowPseudoId;
 }
 
-PassRefPtr<VTTCue> VTTCue::create(ScriptExecutionContext& context, double start, double end, const String& content)
-{
-    return adoptRef(new VTTCue(context, start, end, content));
-}
-
 PassRefPtr<VTTCue> VTTCue::create(ScriptExecutionContext& context, const WebVTTCueData& data)
 {
     return adoptRef(new VTTCue(context, data));
 }
 
-VTTCue::VTTCue(ScriptExecutionContext& context, double start, double end, const String& content)
+VTTCue::VTTCue(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const String& content)
     : TextTrackCue(context, start, end)
     , m_content(content)
 {
@@ -265,12 +260,12 @@ VTTCue::VTTCue(ScriptExecutionContext& context, double start, double end, const
 }
 
 VTTCue::VTTCue(ScriptExecutionContext& context, const WebVTTCueData& cueData)
-    : TextTrackCue(context, 0, 0)
+    : TextTrackCue(context, MediaTime::zeroTime(), MediaTime::zeroTime())
 {
     initialize(context);
     setText(cueData.content());
-    setStartTime(cueData.startTime(), IGNORE_EXCEPTION);
-    setEndTime(cueData.endTime(), IGNORE_EXCEPTION);
+    setStartTime(cueData.startTime());
+    setEndTime(cueData.endTime());
     setId(cueData.id());
     setCueSettings(cueData.settings());
     m_originalStartTime = cueData.originalStartTime();
@@ -300,7 +295,7 @@ void VTTCue::initialize(ScriptExecutionContext& context)
     m_snapToLines = true;
     m_displayTreeShouldChange = true;
     m_notifyRegion = true;
-    m_originalStartTime = 0;
+    m_originalStartTime = MediaTime::zeroTime();
 }
 
 PassRefPtr<VTTCueBox> VTTCue::createDisplayTree()
@@ -751,18 +746,18 @@ void VTTCue::calculateDisplayParameters()
         m_displayPosition.first = m_computedLinePosition;
 }
     
-void VTTCue::markFutureAndPastNodes(ContainerNode* root, double previousTimestamp, double movieTime)
+void VTTCue::markFutureAndPastNodes(ContainerNode* root, const MediaTime& previousTimestamp, const MediaTime& movieTime)
 {
     DEPRECATED_DEFINE_STATIC_LOCAL(const String, timestampTag, (ASCIILiteral("timestamp")));
     
     bool isPastNode = true;
-    double currentTimestamp = previousTimestamp;
+    MediaTime currentTimestamp = previousTimestamp;
     if (currentTimestamp > movieTime)
         isPastNode = false;
     
     for (Node* child = root->firstChild(); child; child = NodeTraversal::next(child, root)) {
         if (child->nodeName() == timestampTag) {
-            double currentTimestamp;
+            MediaTime currentTimestamp;
             bool check = WebVTTParser::collectTimeStamp(child->nodeValue(), currentTimestamp);
             ASSERT_UNUSED(check, check);
             
@@ -780,7 +775,7 @@ void VTTCue::markFutureAndPastNodes(ContainerNode* root, double previousTimestam
     }
 }
 
-void VTTCue::updateDisplayTree(double movieTime)
+void VTTCue::updateDisplayTree(const MediaTime& movieTime)
 {
     // The display tree may contain WebVTT timestamp objects representing
     // timestamps (processing instructions), along with displayable nodes.
@@ -796,7 +791,7 @@ void VTTCue::updateDisplayTree(double movieTime)
     if (!referenceTree)
         return;
 
-    markFutureAndPastNodes(referenceTree.get(), startTime(), movieTime);
+    markFutureAndPastNodes(referenceTree.get(), startMediaTime(), movieTime);
     m_cueHighlightBox->appendChild(referenceTree);
 }
 
index 6fb4794..422fa84 100644 (file)
@@ -74,7 +74,16 @@ protected:
 
 class VTTCue : public TextTrackCue {
 public:
-    static PassRefPtr<VTTCue> create(ScriptExecutionContext&, double start, double end, const String&);
+    static PassRefPtr<VTTCue> create(ScriptExecutionContext& context, double start, double end, const String& content)
+    {
+        return create(context, MediaTime::createWithDouble(start), MediaTime::createWithDouble(end), content);
+    }
+
+    static PassRefPtr<VTTCue> create(ScriptExecutionContext& context, const MediaTime& start, const MediaTime& end, const String& content)
+    {
+        return adoptRef(new VTTCue(context, start, end, content));
+    }
+
     static PassRefPtr<VTTCue> create(ScriptExecutionContext&, const WebVTTCueData&);
 
     static const AtomicString& cueBackdropShadowPseudoId();
@@ -120,9 +129,9 @@ public:
     VTTCueBox* getDisplayTree(const IntSize& videoSize, int fontSize);
     HTMLSpanElement* element() const { return m_cueHighlightBox.get(); }
 
-    void updateDisplayTree(double);
+    void updateDisplayTree(const MediaTime&);
     void removeDisplayTree();
-    void markFutureAndPastNodes(ContainerNode*, double, double);
+    void markFutureAndPastNodes(ContainerNode*, const MediaTime&, const MediaTime&);
 
     int calculateComputedLinePosition();
     std::pair<double, double> getPositionCoordinates() const;
@@ -164,7 +173,7 @@ public:
     virtual void didChange() override;
 
 protected:
-    VTTCue(ScriptExecutionContext&, double start, double end, const String& content);
+    VTTCue(ScriptExecutionContext&, const MediaTime& start, const MediaTime& end, const String& content);
     VTTCue(ScriptExecutionContext&, const WebVTTCueData&);
 
     virtual PassRefPtr<VTTCueBox> createDisplayTree();
@@ -215,7 +224,7 @@ private:
     int m_displaySize;
     std::pair<float, float> m_displayPosition;
 
-    double m_originalStartTime;
+    MediaTime m_originalStartTime;
 
     bool m_snapToLines : 1;
     bool m_displayTreeShouldChange : 1;
index 60717a1..3bb2b0d 100644 (file)
@@ -89,8 +89,6 @@ WebVTTParser::WebVTTParser(WebVTTParserClient* client, ScriptExecutionContext* c
     : m_scriptExecutionContext(context)
     , m_state(Initial)
     , m_decoder(TextResourceDecoder::create("text/plain", UTF8Encoding()))
-    , m_currentStartTime(0)
-    , m_currentEndTime(0)
     , m_client(client)
 {
 }
@@ -128,15 +126,15 @@ void WebVTTParser::parseCueData(const ISOWebVTTCue& data)
 {
     RefPtr<WebVTTCueData> cue = WebVTTCueData::create();
 
-    double startTime = data.presentationTime().toDouble();
+    MediaTime startTime = data.presentationTime();
     cue->setStartTime(startTime);
-    cue->setEndTime(startTime + data.duration().toDouble());
+    cue->setEndTime(startTime + data.duration());
 
     cue->setContent(data.cueText());
     cue->setId(data.id());
     cue->setSettings(data.settings());
 
-    double originalStartTime;
+    MediaTime originalStartTime;
     if (WebVTTParser::collectTimeStamp(data.originalStartTime(), originalStartTime))
         cue->setOriginalStartTime(originalStartTime);
 
@@ -437,8 +435,8 @@ void WebVTTParser::resetCueValues()
 {
     m_currentId = emptyString();
     m_currentSettings = emptyString();
-    m_currentStartTime = 0;
-    m_currentEndTime = 0;
+    m_currentStartTime = MediaTime::zeroTime();
+    m_currentEndTime = MediaTime::zeroTime();
     m_currentContent.clear();
 }
 
@@ -465,7 +463,7 @@ void WebVTTParser::createNewRegion(const String& headerValue)
 }
 #endif
 
-bool WebVTTParser::collectTimeStamp(const String& line, double& timeStamp)
+bool WebVTTParser::collectTimeStamp(const String& line, MediaTime& timeStamp)
 {
     if (line.isEmpty())
         return false;
@@ -474,7 +472,7 @@ bool WebVTTParser::collectTimeStamp(const String& line, double& timeStamp)
     return collectTimeStamp(input, timeStamp);
 }
 
-bool WebVTTParser::collectTimeStamp(VTTScanner& input, double& timeStamp)
+bool WebVTTParser::collectTimeStamp(VTTScanner& input, MediaTime& timeStamp)
 {
     // Collect a WebVTT timestamp (5.3 WebVTT cue timings and settings parsing.)
     // Steps 1 - 4 - Initial checks, let most significant units be minutes.
@@ -514,7 +512,7 @@ bool WebVTTParser::collectTimeStamp(VTTScanner& input, double& timeStamp)
         return false;
 
     // Steps 18 - 19 - Calculate result.
-    timeStamp = (value1 * secondsPerHour) + (value2 * secondsPerMinute) + value3 + (value4 * secondsPerMillisecond);
+    timeStamp = MediaTime::createWithDouble((value1 * secondsPerHour) + (value2 * secondsPerMinute) + value3 + (value4 * secondsPerMillisecond));
     return true;
 }
 
@@ -611,7 +609,7 @@ void WebVTTTreeBuilder::constructTreeFromToken(Document& document)
     }
     case WebVTTTokenTypes::TimestampTag: {
         String charactersString = m_token.characters();
-        double parsedTimeStamp;
+        MediaTime parsedTimeStamp;
         if (WebVTTParser::collectTimeStamp(charactersString, parsedTimeStamp))
             m_currentNode->parserAppendChild(ProcessingInstruction::create(document, "timestamp", charactersString));
         break;
index c03299e..ec3d28d 100644 (file)
@@ -42,6 +42,7 @@
 #include "VTTRegion.h"
 #include "WebVTTTokenizer.h"
 #include <memory>
+#include <wtf/MediaTime.h>
 #include <wtf/text/StringBuilder.h>
 
 namespace WebCore {
@@ -69,11 +70,11 @@ public:
     static PassRefPtr<WebVTTCueData> create() { return adoptRef(new WebVTTCueData()); }
     virtual ~WebVTTCueData() { }
 
-    double startTime() const { return m_startTime; }
-    void setStartTime(double startTime) { m_startTime = startTime; }
+    MediaTime startTime() const { return m_startTime; }
+    void setStartTime(const MediaTime& startTime) { m_startTime = startTime; }
 
-    double endTime() const { return m_endTime; }
-    void setEndTime(double endTime) { m_endTime = endTime; }
+    MediaTime endTime() const { return m_endTime; }
+    void setEndTime(const MediaTime& endTime) { m_endTime = endTime; }
 
     String id() const { return m_id; }
     void setId(String id) { m_id = id; }
@@ -84,20 +85,15 @@ public:
     String settings() const { return m_settings; }
     void setSettings(String settings) { m_settings = settings; }
 
-    double originalStartTime() const { return m_originalStartTime; }
-    void setOriginalStartTime(double time) { m_originalStartTime = time; }
+    MediaTime originalStartTime() const { return m_originalStartTime; }
+    void setOriginalStartTime(const MediaTime& time) { m_originalStartTime = time; }
 
 private:
-    WebVTTCueData()
-        : m_startTime(0)
-        , m_endTime(0)
-        , m_originalStartTime(0)
-    {
-    }
+    WebVTTCueData() { }
 
-    double m_startTime;
-    double m_endTime;
-    double m_originalStartTime;
+    MediaTime m_startTime;
+    MediaTime m_endTime;
+    MediaTime m_originalStartTime;
     String m_id;
     String m_content;
     String m_settings;
@@ -137,7 +133,7 @@ public:
         // U+0020 SPACE characters or U+0009 CHARACTER TABULATION (tab) characters.
         return c == ' ' || c == '\t';
     }
-    static bool collectTimeStamp(const String&, double&);
+    static bool collectTimeStamp(const String&, MediaTime&);
 
     // Useful functions for parsing percentage settings.
     static bool parseFloatPercentageValue(VTTScanner& valueScanner, float&);
@@ -183,13 +179,13 @@ private:
     void createNewRegion(const String& headerValue);
 #endif
 
-    static bool collectTimeStamp(VTTScanner& input, double& timeStamp);
+    static bool collectTimeStamp(VTTScanner& input, MediaTime& timeStamp);
 
     BufferedLineReader m_lineReader;
     RefPtr<TextResourceDecoder> m_decoder;
     String m_currentId;
-    double m_currentStartTime;
-    double m_currentEndTime;
+    MediaTime m_currentStartTime;
+    MediaTime m_currentEndTime;
     StringBuilder m_currentContent;
     String m_currentSettings;
 
index ce535d1..07602d2 100644 (file)
@@ -28,6 +28,7 @@
 
 #include "Color.h"
 #include "TrackPrivateBase.h"
+#include <wtf/MediaTime.h>
 
 #if ENABLE(DATACUE_VALUE)
 #include "SerializedPlatformRepresentation.h"
@@ -46,11 +47,11 @@ public:
     static PassRefPtr<GenericCueData> create() { return adoptRef(new GenericCueData()); }
     virtual ~GenericCueData() { }
 
-    double startTime() const { return m_startTime; }
-    void setStartTime(double startTime) { m_startTime = startTime; }
+    MediaTime startTime() const { return m_startTime; }
+    void setStartTime(const MediaTime& startTime) { m_startTime = startTime; }
 
-    double endTime() const { return m_endTime; }
-    void setEndTime(double endTime) { m_endTime = endTime; }
+    MediaTime endTime() const { return m_endTime; }
+    void setEndTime(const MediaTime& endTime) { m_endTime = endTime; }
 
     String id() const { return m_id; }
     void setId(String id) { m_id = id; }
@@ -106,9 +107,7 @@ public:
 
 private:
     GenericCueData()
-        : m_startTime(0)
-        , m_endTime(0)
-        , m_line(-1)
+        : m_line(-1)
         , m_position(-1)
         , m_size(-1)
         , m_align(None)
@@ -118,8 +117,8 @@ private:
     {
     }
 
-    double m_startTime;
-    double m_endTime;
+    MediaTime m_startTime;
+    MediaTime m_endTime;
     String m_id;
     String m_content;
     double m_line;
@@ -169,12 +168,12 @@ class InbandTextTrackPrivateClient : public TrackPrivateBaseClient {
 public:
     virtual ~InbandTextTrackPrivateClient() { }
 
-    virtual void addDataCue(InbandTextTrackPrivate*, double start, double end, const void*, unsigned) = 0;
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, const void*, unsigned) = 0;
 
 #if ENABLE(DATACUE_VALUE)
-    virtual void addDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>, const String&) = 0;
-    virtual void updateDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>) = 0;
-    virtual void removeDataCue(InbandTextTrackPrivate*, double start, double end, PassRefPtr<SerializedPlatformRepresentation>) = 0;
+    virtual void addDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>, const String&) = 0;
+    virtual void updateDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>) = 0;
+    virtual void removeDataCue(InbandTextTrackPrivate*, const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>) = 0;
 #endif
 
     virtual void addGenericCue(InbandTextTrackPrivate*, PassRefPtr<GenericCueData>) = 0;
index 07889c9..a7f4b1e 100644 (file)
@@ -517,34 +517,34 @@ void MediaPlayer::setCDMSession(CDMSession* session)
 }
 #endif
     
-double MediaPlayer::duration() const
+MediaTime MediaPlayer::duration() const
 {
-    return m_private->durationDouble();
+    return m_private->durationMediaTime();
 }
 
-double MediaPlayer::startTime() const
+MediaTime MediaPlayer::startTime() const
 {
-    return m_private->startTimeDouble();
+    return m_private->startTime();
 }
 
-double MediaPlayer::initialTime() const
+MediaTime MediaPlayer::initialTime() const
 {
     return m_private->initialTime();
 }
 
-double MediaPlayer::currentTime() const
+MediaTime MediaPlayer::currentTime() const
 {
-    return m_private->currentTimeDouble();
+    return m_private->currentMediaTime();
 }
 
-void MediaPlayer::seekWithTolerance(double time, double negativeTolerance, double positiveTolerance)
+void MediaPlayer::seekWithTolerance(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
     m_private->seekWithTolerance(time, negativeTolerance, positiveTolerance);
 }
 
-void MediaPlayer::seek(double time)
+void MediaPlayer::seek(const MediaTime& time)
 {
-    m_private->seekDouble(time);
+    m_private->seek(time);
 }
 
 bool MediaPlayer::paused() const
@@ -722,14 +722,14 @@ std::unique_ptr<PlatformTimeRanges> MediaPlayer::seekable()
     return m_private->seekable();
 }
 
-double MediaPlayer::maxTimeSeekable()
+MediaTime MediaPlayer::maxTimeSeekable()
 {
-    return m_private->maxTimeSeekableDouble();
+    return m_private->maxMediaTimeSeekable();
 }
 
-double MediaPlayer::minTimeSeekable()
+MediaTime MediaPlayer::minTimeSeekable()
 {
-    return m_private->minTimeSeekable();
+    return m_private->minMediaTimeSeekable();
 }
 
 bool MediaPlayer::didLoadingProgress()
@@ -950,9 +950,9 @@ MediaPlayer::MovieLoadType MediaPlayer::movieLoadType() const
     return m_private->movieLoadType();
 }
 
-double MediaPlayer::mediaTimeForTimeValue(double timeValue) const
+MediaTime MediaPlayer::mediaTimeForTimeValue(const MediaTime& timeValue) const
 {
-    return m_private->mediaTimeForTimeValueDouble(timeValue);
+    return m_private->mediaTimeForTimeValue(timeValue);
 }
 
 double MediaPlayer::maximumDurationToCacheMediaTime() const
@@ -1367,10 +1367,10 @@ unsigned long MediaPlayer::corruptedVideoFrames()
     return m_private->corruptedVideoFrames();
 }
 
-double MediaPlayer::totalFrameDelay()
+MediaTime MediaPlayer::totalFrameDelay()
 {
     if (!m_private)
-        return 0;
+        return MediaTime::zeroTime();
 
     return m_private->totalFrameDelay();
 }
index a5dc22f..5ce7514 100644 (file)
@@ -366,14 +366,13 @@ public:
     bool seeking() const;
 
     static double invalidTime() { return -1.0;}
-    double duration() const;
-    double currentTime() const;
-    void seek(double time);
-    void seekWithTolerance(double time, double negativeTolerance, double positiveTolerance);
+    MediaTime duration() const;
+    MediaTime currentTime() const;
+    void seek(const MediaTime&);
+    void seekWithTolerance(const MediaTime&, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance);
 
-    double startTime() const;
-
-    double initialTime() const;
+    MediaTime startTime() const;
+    MediaTime initialTime() const;
 
     double rate() const;
     void setRate(double);
@@ -383,8 +382,8 @@ public:
 
     std::unique_ptr<PlatformTimeRanges> buffered();
     std::unique_ptr<PlatformTimeRanges> seekable();
-    double minTimeSeekable();
-    double maxTimeSeekable();
+    MediaTime minTimeSeekable();
+    MediaTime maxTimeSeekable();
 
     bool didLoadingProgress();
 
@@ -506,7 +505,7 @@ public:
 
     bool didPassCORSAccessCheck() const;
 
-    double mediaTimeForTimeValue(double) const;
+    MediaTime mediaTimeForTimeValue(const MediaTime&) const;
 
     double maximumDurationToCacheMediaTime() const;
 
@@ -582,7 +581,7 @@ public:
     unsigned long totalVideoFrames();
     unsigned long droppedVideoFrames();
     unsigned long corruptedVideoFrames();
-    double totalFrameDelay();
+    MediaTime totalFrameDelay();
 #endif
 
     bool shouldWaitForResponseToAuthenticationChallenge(const AuthenticationChallenge&);
index eaae5f2..97ff181 100644 (file)
@@ -82,20 +82,21 @@ public:
 
     virtual float duration() const { return 0; }
     virtual double durationDouble() const { return duration(); }
+    virtual MediaTime durationMediaTime() const { return MediaTime::createWithDouble(durationDouble()); }
 
     virtual float currentTime() const { return 0; }
     virtual double currentTimeDouble() const { return currentTime(); }
+    virtual MediaTime currentMediaTime() const { return MediaTime::createWithDouble(currentTimeDouble()); }
 
     virtual void seek(float) { }
     virtual void seekDouble(double time) { seek(time); }
-    virtual void seekWithTolerance(double time, double, double) { seekDouble(time); }
+    virtual void seek(const MediaTime& time) { seekDouble(time.toDouble()); }
+    virtual void seekWithTolerance(const MediaTime& time, const MediaTime&, const MediaTime&) { seek(time); }
 
     virtual bool seeking() const = 0;
 
-    virtual float startTime() const { return 0; }
-    virtual double startTimeDouble() const { return startTime(); }
-
-    virtual double initialTime() const { return 0; }
+    virtual MediaTime startTime() const { return MediaTime::zeroTime(); }
+    virtual MediaTime initialTime() const { return MediaTime::zeroTime(); }
 
     virtual void setRate(float) { }
     virtual void setRateDouble(double rate) { setRate(rate); }
@@ -122,10 +123,11 @@ public:
     virtual MediaPlayer::NetworkState networkState() const = 0;
     virtual MediaPlayer::ReadyState readyState() const = 0;
 
-    virtual std::unique_ptr<PlatformTimeRanges> seekable() const { return maxTimeSeekableDouble() ? PlatformTimeRanges::create(MediaTime::createWithDouble(minTimeSeekable()), MediaTime::createWithDouble(maxTimeSeekableDouble())) : PlatformTimeRanges::create(); }
+    virtual std::unique_ptr<PlatformTimeRanges> seekable() const { return maxMediaTimeSeekable() == MediaTime::zeroTime() ? PlatformTimeRanges::create() : PlatformTimeRanges::create(minMediaTimeSeekable(), maxMediaTimeSeekable()); }
     virtual float maxTimeSeekable() const { return 0; }
-    virtual double maxTimeSeekableDouble() const { return maxTimeSeekable(); }
+    virtual MediaTime maxMediaTimeSeekable() const { return MediaTime::createWithDouble(maxTimeSeekable()); }
     virtual double minTimeSeekable() const { return 0; }
+    virtual MediaTime minMediaTimeSeekable() const { return MediaTime::createWithDouble(minTimeSeekable()); }
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const = 0;
 
     virtual bool didLoadingProgress() const = 0;
@@ -188,8 +190,7 @@ public:
 
     // Time value in the movie's time scale. It is only necessary to override this if the media
     // engine uses rational numbers to represent media time.
-    virtual float mediaTimeForTimeValue(float timeValue) const { return timeValue; }
-    virtual double mediaTimeForTimeValueDouble(double timeValue) const { return timeValue; }
+    virtual MediaTime mediaTimeForTimeValue(const MediaTime& timeValue) const { return timeValue; }
 
     // Overide this if it is safe for HTMLMediaElement to cache movie time and report
     // 'currentTime' as [cached time + elapsed wall time]. Returns the maximum wall time
@@ -249,7 +250,7 @@ public:
     virtual unsigned long totalVideoFrames() { return 0; }
     virtual unsigned long droppedVideoFrames() { return 0; }
     virtual unsigned long corruptedVideoFrames() { return 0; }
-    virtual double totalFrameDelay() { return 0; }
+    virtual MediaTime totalFrameDelay() { return MediaTime::zeroTime(); }
 #endif
 
 #if ENABLE(AVF_CAPTIONS)
index 43f1717..f20538c 100644 (file)
@@ -40,7 +40,7 @@ public:
     virtual ~MediaSourcePrivateClient() { }
 
     virtual void setPrivateAndOpen(PassRef<MediaSourcePrivate>) = 0;
-    virtual double duration() const = 0;
+    virtual MediaTime duration() const = 0;
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const = 0;
     virtual void seekToTime(const MediaTime&) = 0;
 };
index a7339bc..731aa8d 100644 (file)
@@ -29,6 +29,7 @@
 #define TrackPrivateBase_h
 
 #include <wtf/Forward.h>
+#include <wtf/MediaTime.h>
 #include <wtf/Noncopyable.h>
 #include <wtf/RefCounted.h>
 #include <wtf/text/AtomicString.h>
@@ -61,7 +62,7 @@ public:
 
     virtual int trackIndex() const { return 0; }
 
-    virtual double startTimeVariance() const { return 0; }
+    virtual MediaTime startTimeVariance() const { return MediaTime::zeroTime(); }
     
     void willBeRemoved()
     {
index 5850072..5290494 100644 (file)
@@ -47,7 +47,6 @@ InbandMetadataTextTrackPrivateAVF::InbandMetadataTextTrackPrivateAVF(InbandTextT
     : InbandTextTrackPrivate(cueFormat)
     , m_kind(kind)
     , m_id(id)
-    , m_currentCueStartTime(0)
 {
 }
 
@@ -56,7 +55,7 @@ InbandMetadataTextTrackPrivateAVF::~InbandMetadataTextTrackPrivateAVF()
 }
 
 #if ENABLE(DATACUE_VALUE)
-void InbandMetadataTextTrackPrivateAVF::addDataCue(double start, double end, PassRefPtr<SerializedPlatformRepresentation> prpCueData, const String& type)
+void InbandMetadataTextTrackPrivateAVF::addDataCue(const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation> prpCueData, const String& type)
 {
     ASSERT(cueFormat() == Data);
     if (!client())
@@ -64,42 +63,42 @@ void InbandMetadataTextTrackPrivateAVF::addDataCue(double start, double end, Pas
 
     RefPtr<SerializedPlatformRepresentation> cueData = prpCueData;
     m_currentCueStartTime = start;
-    if (end == std::numeric_limits<double>::infinity())
+    if (end.isPositiveInfinite())
         m_incompleteCues.append(new IncompleteMetaDataCue(start, cueData));
     client()->addDataCue(this, start, end, cueData, type);
 }
 
-void InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes(double time)
+void InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes(const MediaTime& time)
 {
     if (time >= m_currentCueStartTime) {
         for (size_t i = 0; i < m_incompleteCues.size(); i++) {
             IncompleteMetaDataCue* partialCue = m_incompleteCues[i];
 
-            LOG(Media, "InbandMetadataTextTrackPrivateAVF::addDataCue(%p) - updating cue: start=%.2f, end=%.2f", this, partialCue->startTime(), time);
+            LOG(Media, "InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes(%p) - updating cue: start=%s, end=%s", this, toString(partialCue->startTime()).utf8().data(), toString(time).utf8().data());
             client()->updateDataCue(this, partialCue->startTime(), time, partialCue->cueData());
         }
     } else
-        LOG(Media, "InbandMetadataTextTrackPrivateAVF::addDataCue negative length cue(s) ignored: start=%.2f, end=%.2f\n", m_currentCueStartTime, time);
+        LOG(Media, "InbandMetadataTextTrackPrivateAVF::updatePendingCueEndTimes negative length cue(s) ignored: start=%s, end=%s\n", toString(m_currentCueStartTime).utf8().data(), toString(time).utf8().data());
 
     m_incompleteCues.resize(0);
-    m_currentCueStartTime = 0;
+    m_currentCueStartTime = MediaTime::zeroTime();
 }
 #endif
 
 void InbandMetadataTextTrackPrivateAVF::flushPartialCues()
 {
     if (m_currentCueStartTime && m_incompleteCues.size())
-        LOG(Media, "InbandMetadataTextTrackPrivateAVF::resetCueValues flushing incomplete data for cues: start=%.2f\n", m_currentCueStartTime);
+        LOG(Media, "InbandMetadataTextTrackPrivateAVF::flushPartialCues flushing incomplete data for cues: start=%s\n", toString(m_currentCueStartTime).utf8().data());
 
     if (client()) {
         for (size_t i = 0; i < m_incompleteCues.size(); i++) {
             IncompleteMetaDataCue* partialCue = m_incompleteCues[i];
-            client()->removeDataCue(this, partialCue->startTime(), std::numeric_limits<double>::infinity(), partialCue->cueData());
+            client()->removeDataCue(this, partialCue->startTime(), MediaTime::positiveInfiniteTime(), partialCue->cueData());
         }
     }
 
     m_incompleteCues.resize(0);
-    m_currentCueStartTime = 0;
+    m_currentCueStartTime = MediaTime::zeroTime();
 }
 
 } // namespace WebCore
index 0dc3d77..554360f 100644 (file)
@@ -35,7 +35,7 @@ namespace WebCore {
 #if ENABLE(DATACUE_VALUE)
 class IncompleteMetaDataCue {
 public:
-    IncompleteMetaDataCue(double time, PassRefPtr<SerializedPlatformRepresentation> cueData)
+    IncompleteMetaDataCue(const MediaTime& time, PassRefPtr<SerializedPlatformRepresentation> cueData)
         : m_cueData(cueData)
         , m_startTime(time)
     {
@@ -43,11 +43,11 @@ public:
     ~IncompleteMetaDataCue() { }
 
     RefPtr<SerializedPlatformRepresentation> cueData() const { return m_cueData; }
-    double startTime() const { return m_startTime; }
+    MediaTime startTime() const { return m_startTime; }
 
 private:
     RefPtr<SerializedPlatformRepresentation> m_cueData;
-    double m_startTime;
+    MediaTime m_startTime;
 };
 #endif
 
@@ -63,8 +63,8 @@ public:
     void setInBandMetadataTrackDispatchType(const AtomicString& value) { m_inBandMetadataTrackDispatchType = value; }
 
 #if ENABLE(DATACUE_VALUE)
-    void addDataCue(double start, double end, PassRefPtr<SerializedPlatformRepresentation>, const String&);
-    void updatePendingCueEndTimes(double);
+    void addDataCue(const MediaTime& start, const MediaTime& end, PassRefPtr<SerializedPlatformRepresentation>, const String&);
+    void updatePendingCueEndTimes(const MediaTime&);
 #endif
 
     void flushPartialCues();
@@ -75,7 +75,7 @@ private:
     Kind m_kind;
     AtomicString m_id;
     AtomicString m_inBandMetadataTrackDispatchType;
-    double m_currentCueStartTime;
+    MediaTime m_currentCueStartTime;
 #if ENABLE(DATACUE_VALUE)
     Vector<IncompleteMetaDataCue*> m_incompleteCues;
 #endif
index 1fb383f..fb69d76 100644 (file)
@@ -32,6 +32,7 @@
 #include "ISOVTTCue.h"
 #include "InbandTextTrackPrivateClient.h"
 #include "Logging.h"
+#include "MediaTimeAVFoundation.h"
 #include "SoftLinking.h"
 #include <CoreMedia/CoreMedia.h>
 #include <runtime/ArrayBuffer.h>
 #include <wtf/unicode/CharacterNames.h>
 
 #if !PLATFORM(WIN)
-#include "MediaTimeMac.h"
-#endif
-
-#if !PLATFORM(WIN)
 #define SOFT_LINK_AVF_FRAMEWORK(Lib) SOFT_LINK_FRAMEWORK_OPTIONAL(Lib)
 #define SOFT_LINK_AVF_POINTER(Lib, Name, Type) SOFT_LINK_POINTER_OPTIONAL(Lib, Name, Type)
 #else
@@ -420,7 +417,7 @@ void InbandTextTrackPrivateAVF::processCueAttributes(CFAttributedStringRef attri
         cueData.setContent(content.toString());
 }
 
-void InbandTextTrackPrivateAVF::processCue(CFArrayRef attributedStrings, CFArrayRef nativeSamples, double time)
+void InbandTextTrackPrivateAVF::processCue(CFArrayRef attributedStrings, CFArrayRef nativeSamples, const MediaTime& time)
 {
     if (!client())
         return;
@@ -429,9 +426,9 @@ void InbandTextTrackPrivateAVF::processCue(CFArrayRef attributedStrings, CFArray
     processNativeSamples(nativeSamples, time);
 }
 
-void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedStrings, double time)
+void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedStrings, const MediaTime& time)
 {
-    LOG(Media, "InbandTextTrackPrivateAVF::processAttributedStrings - %li attributed strings at time %.2f\n", attributedStrings ? CFArrayGetCount(attributedStrings) : 0, time);
+    LOG(Media, "InbandTextTrackPrivateAVF::processAttributedStrings - %li attributed strings at time %s\n", attributedStrings ? CFArrayGetCount(attributedStrings) : 0, toString(time).utf8().data());
 
     Vector<RefPtr<GenericCueData>> arrivingCues;
     if (attributedStrings) {
@@ -450,14 +447,14 @@ void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedSt
             arrivingCues.append(cueData);
             
             cueData->setStartTime(time);
-            cueData->setEndTime(std::numeric_limits<double>::infinity());
+            cueData->setEndTime(MediaTime::positiveInfiniteTime());
             
             // AVFoundation cue "position" is to the center of the text so adjust relative to the edge because we will use it to
             // set CSS "left".
             if (cueData->position() >= 0 && cueData->size() > 0)
                 cueData->setPosition(cueData->position() - cueData->size() / 2);
             
-            LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - considering cue (\"%s\") for time = %.2f, position =  %.2f, line =  %.2f", this, cueData->content().utf8().data(), cueData->startTime(), cueData->position(), cueData->line());
+            LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - considering cue (\"%s\") for time = %s, position =  %.2f, line =  %.2f", this, cueData->content().utf8().data(), toString(cueData->startTime()).utf8().data(), cueData->position(), cueData->line());
             
             cueData->setStatus(GenericCueData::Partial);
         }
@@ -475,7 +472,7 @@ void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedSt
                     if (!arrivingCue->doesExtendCueData(*cueData))
                         nonExtensionCues.append(arrivingCue);
                     else
-                        LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - found an extension cue (\"%s\") for time = %.2f, end = %.2f, position =  %.2f, line =  %.2f", this, arrivingCue->content().utf8().data(), arrivingCue->startTime(), arrivingCue->endTime(), arrivingCue->position(), arrivingCue->line());
+                        LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - found an extension cue (\"%s\") for time = %.2f, end = %.2f, position =  %.2f, line =  %.2f", this, arrivingCue->content().utf8().data(), arrivingCue->startTime().toDouble(), arrivingCue->endTime().toDouble(), arrivingCue->position(), arrivingCue->line());
                 }
 
                 bool currentCueIsExtended = (arrivingCues.size() != nonExtensionCues.size());
@@ -489,16 +486,16 @@ void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedSt
                     cueData->setEndTime(m_currentCueEndTime);
                     cueData->setStatus(GenericCueData::Complete);
 
-                    LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - updating cue \"%s\": start=%.2f, end=%.2f", this, cueData->content().utf8().data(), cueData->startTime(), m_currentCueEndTime);
+                    LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - updating cue \"%s\": start=%.2f, end=%.2f", this, cueData->content().utf8().data(), cueData->startTime().toDouble(), m_currentCueEndTime.toDouble());
                     client()->updateGenericCue(this, cueData.get());
                 } else {
                     // We have to assume that the implicit duration is invalid for cues delivered during a seek because the AVF decode pipeline may not
                     // see every cue, so DO NOT update cue duration while seeking.
-                    LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - ignoring cue delivered during seek: start=%.2f, end=%.2f, content=\"%s\"", this, cueData->startTime(), m_currentCueEndTime, cueData->content().utf8().data());
+                    LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - ignoring cue delivered during seek: start=%s, end=%s, content=\"%s\"", this, toString(cueData->startTime()).utf8().data(), toString(m_currentCueEndTime).utf8().data(), cueData->content().utf8().data());
                 }
             }
         } else
-            LOG(Media, "InbandTextTrackPrivateAVF::processCue negative length cue(s) ignored: start=%.2f, end=%.2f\n", m_currentCueStartTime, m_currentCueEndTime);
+            LOG(Media, "InbandTextTrackPrivateAVF::processCue negative length cue(s) ignored: start=%s, end=%s\n",  toString(m_currentCueStartTime).utf8().data(), toString(m_currentCueEndTime).utf8().data());
 
         removeCompletedCues();
     }
@@ -512,7 +509,7 @@ void InbandTextTrackPrivateAVF::processAttributedStrings(CFArrayRef attributedSt
 
         m_cues.append(cueData);
         
-        LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - adding cue \"%s\" for time = %.2f, end = %.2f, position =  %.2f, line =  %.2f", this, cueData->content().utf8().data(), cueData->startTime(), cueData->endTime(), cueData->position(), cueData->line());
+        LOG(Media, "InbandTextTrackPrivateAVF::processCue(%p) - adding cue \"%s\" for time = %.2f, end = %.2f, position =  %.2f, line =  %.2f", this, cueData->content().utf8().data(), cueData->startTime().toDouble(), cueData->endTime().toDouble(), cueData->position(), cueData->line());
 
         client()->addGenericCue(this, cueData.release());
     }
@@ -543,7 +540,7 @@ void InbandTextTrackPrivateAVF::removeCompletedCues()
             if (m_cues[currentCue]->status() != GenericCueData::Complete)
                 continue;
 
-            LOG(Media, "InbandTextTrackPrivateAVF::removeCompletedCues(%p) - removing cue \"%s\": start=%.2f, end=%.2f", this, m_cues[currentCue]->content().utf8().data(), m_cues[currentCue]->startTime(), m_cues[currentCue]->endTime());
+            LOG(Media, "InbandTextTrackPrivateAVF::removeCompletedCues(%p) - removing cue \"%s\": start=%.2f, end=%.2f", this, m_cues[currentCue]->content().utf8().data(), m_cues[currentCue]->startTime().toDouble(), m_cues[currentCue]->endTime().toDouble());
 
             m_cues.remove(currentCue);
         }
@@ -552,14 +549,14 @@ void InbandTextTrackPrivateAVF::removeCompletedCues()
     if (m_cues.isEmpty())
         m_pendingCueStatus = None;
 
-    m_currentCueStartTime = 0;
-    m_currentCueEndTime = 0;
+    m_currentCueStartTime = MediaTime::zeroTime();
+    m_currentCueEndTime = MediaTime::zeroTime();
 }
 
 void InbandTextTrackPrivateAVF::resetCueValues()
 {
     if (m_currentCueEndTime && m_cues.size())
-        LOG(Media, "InbandTextTrackPrivateAVF::resetCueValues flushing data for cues: start=%.2f\n", m_currentCueStartTime);
+        LOG(Media, "InbandTextTrackPrivateAVF::resetCueValues flushing data for cues: start=%s\n", toString(m_currentCueStartTime).utf8().data());
 
     if (client()) {
         for (size_t i = 0; i < m_cues.size(); i++)
@@ -568,8 +565,8 @@ void InbandTextTrackPrivateAVF::resetCueValues()
 
     m_cues.resize(0);
     m_pendingCueStatus = None;
-    m_currentCueStartTime = 0;
-    m_currentCueEndTime = 0;
+    m_currentCueStartTime = MediaTime::zeroTime();
+    m_currentCueEndTime = MediaTime::zeroTime();
 }
 
 void InbandTextTrackPrivateAVF::setMode(InbandTextTrackPrivate::Mode newMode)
@@ -586,7 +583,7 @@ void InbandTextTrackPrivateAVF::setMode(InbandTextTrackPrivate::Mode newMode)
     m_owner->trackModeChanged();
 }
 
-void InbandTextTrackPrivateAVF::processNativeSamples(CFArrayRef nativeSamples, double presentationTime)
+void InbandTextTrackPrivateAVF::processNativeSamples(CFArrayRef nativeSamples, const MediaTime& presentationTime)
 {
     if (!nativeSamples)
         return;
@@ -595,7 +592,7 @@ void InbandTextTrackPrivateAVF::processNativeSamples(CFArrayRef nativeSamples, d
     if (!count)
         return;
 
-    LOG(Media, "InbandTextTrackPrivateAVF::processNativeSamples - %li sample buffers at time %.2f\n", count, presentationTime);
+    LOG(Media, "InbandTextTrackPrivateAVF::processNativeSamples - %li sample buffers at time %.2f\n", count, presentationTime.toDouble());
 
     for (CFIndex i = 0; i < count; i++) {
 
@@ -632,11 +629,7 @@ void InbandTextTrackPrivateAVF::processNativeSamples(CFArrayRef nativeSamples, d
         LOG(Media, "InbandTextTrackPrivateAVF::processNativeSamples(%p) - chunk type = '%s', size = %zu", this, type.utf8().data(), boxLength);
 
         if (type == ISOWebVTTCue::boxType()) {
-#if !PLATFORM(WIN)
-            ISOWebVTTCue cueData = ISOWebVTTCue(MediaTime::createWithDouble(presentationTime), toMediaTime(timingInfo.duration), buffer.get());
-#else
-            ISOWebVTTCue cueData = ISOWebVTTCue(MediaTime::createWithDouble(presentationTime), MediaTime::createWithDouble(CMTimeGetSeconds(timingInfo.duration)), buffer.get());
-#endif
+            ISOWebVTTCue cueData = ISOWebVTTCue(presentationTime, toMediaTime(timingInfo.duration), buffer.get());
             LOG(Media, "    sample presentation time = %.2f, duration = %.2f", cueData.presentationTime().toDouble(), cueData.duration().toDouble());
             LOG(Media, "    id = \"%s\", settings = \"%s\", cue text = \"%s\"", cueData.id().utf8().data(), cueData.settings().utf8().data(), cueData.cueText().utf8().data());
             LOG(Media, "    sourceID = \"%s\", originalStartTime = \"%s\"", cueData.sourceID().utf8().data(), cueData.originalStartTime().utf8().data());
index 540bbbb..15f10f5 100644 (file)
@@ -55,7 +55,7 @@ public:
     bool hasBeenReported() const { return m_hasBeenReported; }
     void setHasBeenReported(bool reported) { m_hasBeenReported = reported; }
 
-    virtual void processCue(CFArrayRef attributedStrings, CFArrayRef nativeSamples, double);
+    virtual void processCue(CFArrayRef attributedStrings, CFArrayRef nativeSamples, const MediaTime&);
     virtual void resetCueValues();
 
     void beginSeeking();
@@ -68,19 +68,19 @@ public:
         InBand
     };
     virtual Category textTrackCategory() const = 0;
-
-    virtual double startTimeVariance() const override { return 0.25; }
-
+    
+    virtual MediaTime startTimeVariance() const override { return MediaTime(1, 4); }
+    
 protected:
     InbandTextTrackPrivateAVF(AVFInbandTrackParent*, CueFormat);
 
     void processCueAttributes(CFAttributedStringRef, GenericCueData&);
-    void processAttributedStrings(CFArrayRef, double);
-    void processNativeSamples(CFArrayRef, double);
+    void processAttributedStrings(CFArrayRef, const MediaTime&);
+    void processNativeSamples(CFArrayRef, const MediaTime&);
     void removeCompletedCues();
 
-    double m_currentCueStartTime;
-    double m_currentCueEndTime;
+    MediaTime m_currentCueStartTime;
+    MediaTime m_currentCueEndTime;
 
     Vector<RefPtr<GenericCueData>> m_cues;
     AVFInbandTrackParent* m_owner;
index 1cbb5dd..637b8e3 100644 (file)
@@ -58,12 +58,9 @@ MediaPlayerPrivateAVFoundation::MediaPlayerPrivateAVFoundation(MediaPlayer* play
     , m_networkState(MediaPlayer::Empty)
     , m_readyState(MediaPlayer::HaveNothing)
     , m_preload(MediaPlayer::Auto)
-    , m_cachedMaxTimeLoaded(0)
-    , m_cachedMaxTimeSeekable(0)
-    , m_cachedMinTimeSeekable(0)
-    , m_cachedDuration(MediaPlayer::invalidTime())
-    , m_reportedDuration(MediaPlayer::invalidTime())
-    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaPlayer::invalidTime())
+    , m_cachedDuration(MediaTime::invalidTime())
+    , m_reportedDuration(MediaTime::invalidTime())
+    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::invalidTime())
     , m_requestedRate(1)
     , m_delayCallbacks(0)
     , m_delayCharacteristicsChangedNotification(0)
@@ -245,37 +242,29 @@ void MediaPlayerPrivateAVFoundation::pause()
     platformPause();
 }
 
-float MediaPlayerPrivateAVFoundation::duration() const
+MediaTime MediaPlayerPrivateAVFoundation::durationMediaTime() const
 {
-    return narrowPrecisionToFloat(durationDouble());
-}
-
-double MediaPlayerPrivateAVFoundation::durationDouble() const
-{
-    if (m_cachedDuration != MediaPlayer::invalidTime())
+    if (m_cachedDuration.isValid())
         return m_cachedDuration;
 
-    double duration = platformDuration();
-    if (!duration || duration == MediaPlayer::invalidTime())
-        return 0;
+    MediaTime duration = platformDuration();
+    if (!duration || duration.isInvalid())
+        return MediaTime::zeroTime();
 
     m_cachedDuration = duration;
-    LOG(Media, "MediaPlayerPrivateAVFoundation::duration(%p) - caching %g", this, m_cachedDuration);
+    LOG(Media, "MediaPlayerPrivateAVFoundation::duration(%p) - caching %s", this, toString(m_cachedDuration).utf8().data());
     return m_cachedDuration;
 }
 
-float MediaPlayerPrivateAVFoundation::currentTime() const
+void MediaPlayerPrivateAVFoundation::seek(const MediaTime& time)
 {
-    return narrowPrecisionToFloat(currentTimeDouble());
+    seekWithTolerance(time, MediaTime::zeroTime(), MediaTime::zeroTime());
 }
 
-void MediaPlayerPrivateAVFoundation::seek(float time)
+void MediaPlayerPrivateAVFoundation::seekWithTolerance(const MediaTime& mediaTime, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
-    seekWithTolerance(time, 0, 0);
-}
+    MediaTime time = mediaTime;
 
-void MediaPlayerPrivateAVFoundation::seekWithTolerance(double time, double negativeTolerance, double positiveTolerance)
-{
     if (m_seeking) {
         LOG(Media, "MediaPlayerPrivateAVFoundation::seekWithTolerance(%p) - save pending seek", this);
         m_pendingSeek = [this, time, negativeTolerance, positiveTolerance]() {
@@ -288,16 +277,16 @@ void MediaPlayerPrivateAVFoundation::seekWithTolerance(double time, double negat
     if (!metaDataAvailable())
         return;
 
-    if (time > durationDouble())
-        time = durationDouble();
+    if (time > durationMediaTime())
+        time = durationMediaTime();
 
-    if (currentTimeDouble() == time)
+    if (currentMediaTime() == time)
         return;
 
     if (currentTextTrack())
         currentTextTrack()->beginSeeking();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundation::seek(%p) - seeking to %f", this, time);
+    LOG(Media, "MediaPlayerPrivateAVFoundation::seek(%p) - seeking to %s", this, toString(time).utf8().data());
 
     seekToTime(time, negativeTolerance, positiveTolerance);
 }
@@ -407,34 +396,34 @@ std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundation::buffered() c
     return PlatformTimeRanges::create(*m_cachedLoadedTimeRanges);
 }
 
-double MediaPlayerPrivateAVFoundation::maxTimeSeekableDouble() const
+MediaTime MediaPlayerPrivateAVFoundation::maxMediaTimeSeekable() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
 
     if (!m_cachedMaxTimeSeekable)
         m_cachedMaxTimeSeekable = platformMaxTimeSeekable();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundation::maxTimeSeekable(%p) - returning %f", this, m_cachedMaxTimeSeekable);
+    LOG(Media, "MediaPlayerPrivateAVFoundation::maxTimeSeekable(%p) - returning %s", this, toString(m_cachedMaxTimeSeekable).utf8().data());
     return m_cachedMaxTimeSeekable;   
 }
 
-double MediaPlayerPrivateAVFoundation::minTimeSeekable() const
+MediaTime MediaPlayerPrivateAVFoundation::minMediaTimeSeekable() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
 
     if (!m_cachedMinTimeSeekable)
         m_cachedMinTimeSeekable = platformMinTimeSeekable();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundation::minTimeSeekable(%p) - returning %f", this, m_cachedMinTimeSeekable);
+    LOG(Media, "MediaPlayerPrivateAVFoundation::minTimeSeekable(%p) - returning %s", this, toString(m_cachedMinTimeSeekable).utf8().data());
     return m_cachedMinTimeSeekable;
 }
 
-float MediaPlayerPrivateAVFoundation::maxTimeLoaded() const
+MediaTime MediaPlayerPrivateAVFoundation::maxTimeLoaded() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
 
     if (!m_cachedMaxTimeLoaded)
         m_cachedMaxTimeLoaded = platformMaxTimeLoaded();
@@ -446,7 +435,7 @@ bool MediaPlayerPrivateAVFoundation::didLoadingProgress() const
 {
     if (!duration() || !totalBytes())
         return false;
-    float currentMaxTimeLoaded = maxTimeLoaded();
+    MediaTime currentMaxTimeLoaded = maxTimeLoaded();
     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
 
@@ -545,7 +534,7 @@ void MediaPlayerPrivateAVFoundation::updateStates()
                 FALLTHROUGH;
 
             case MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty:
-                if (maxTimeLoaded() > currentTime())
+                if (maxTimeLoaded() > currentMediaTime())
                     m_readyState = MediaPlayer::HaveFutureData;
                 else
                     m_readyState = MediaPlayer::HaveCurrentData;
@@ -557,7 +546,7 @@ void MediaPlayerPrivateAVFoundation::updateStates()
             else if (itemStatus == MediaPlayerAVPlayerItemStatusFailed)
                 m_networkState = MediaPlayer::DecodeError;
             else if (itemStatus != MediaPlayerAVPlayerItemStatusPlaybackBufferFull && itemStatus >= MediaPlayerAVPlayerItemStatusReadyToPlay)
-                m_networkState = (maxTimeLoaded() == duration()) ? MediaPlayer::Loaded : MediaPlayer::Loading;
+                m_networkState = (maxTimeLoaded() == durationMediaTime()) ? MediaPlayer::Loaded : MediaPlayer::Loading;
         }
     }
 
@@ -640,19 +629,19 @@ void MediaPlayerPrivateAVFoundation::rateChanged()
 void MediaPlayerPrivateAVFoundation::loadedTimeRangesChanged()
 {
     m_cachedLoadedTimeRanges = nullptr;
-    m_cachedMaxTimeLoaded = 0;
+    m_cachedMaxTimeLoaded = MediaTime::zeroTime();
     invalidateCachedDuration();
 }
 
 void MediaPlayerPrivateAVFoundation::seekableTimeRangesChanged()
 {
-    m_cachedMaxTimeSeekable = 0;
-    m_cachedMinTimeSeekable = 0;
+    m_cachedMaxTimeSeekable = MediaTime::zeroTime();
+    m_cachedMinTimeSeekable = MediaTime::zeroTime();
 }
 
-void MediaPlayerPrivateAVFoundation::timeChanged(double time)
+void MediaPlayerPrivateAVFoundation::timeChanged(const MediaTime& time)
 {
-    LOG(Media, "MediaPlayerPrivateAVFoundation::timeChanged(%p) - time = %f", this, time);
+    LOG(Media, "MediaPlayerPrivateAVFoundation::timeChanged(%p) - time = %s", this, toString(time).utf8().data());
     UNUSED_PARAM(time);
 }
 
@@ -683,8 +672,8 @@ void MediaPlayerPrivateAVFoundation::didEnd()
 {
     // Hang onto the current time and use it as duration from now on since we are definitely at
     // the end of the movie. Do this because the initial duration is sometimes an estimate.
-    double now = currentTimeDouble();
-    if (now > 0)
+    MediaTime now = currentMediaTime();
+    if (now > MediaTime::zeroTime())
         m_cachedDuration = now;
 
     updateStates();
@@ -695,13 +684,13 @@ void MediaPlayerPrivateAVFoundation::invalidateCachedDuration()
 {
     LOG(Media, "MediaPlayerPrivateAVFoundation::invalidateCachedDuration(%p)", this);
     
-    m_cachedDuration = MediaPlayer::invalidTime();
+    m_cachedDuration = MediaTime::invalidTime();
 
     // For some media files, reported duration is estimated and updated as media is loaded
     // so report duration changed when the estimate is upated.
-    float duration = this->duration();
+    MediaTime duration = this->durationMediaTime();
     if (duration != m_reportedDuration) {
-        if (m_reportedDuration != MediaPlayer::invalidTime())
+        if (m_reportedDuration.isValid())
             m_player->durationChanged();
         m_reportedDuration = duration;
     }
@@ -772,7 +761,7 @@ void MediaPlayerPrivateAVFoundation::clearMainThreadPendingFlag()
     m_mainThreadCallPending = false;
 }
 
-void MediaPlayerPrivateAVFoundation::scheduleMainThreadNotification(Notification::Type type, double time)
+void MediaPlayerPrivateAVFoundation::scheduleMainThreadNotification(Notification::Type type, const MediaTime& time)
 {
     scheduleMainThreadNotification(Notification(type, time));
 }
@@ -959,11 +948,11 @@ void MediaPlayerPrivateAVFoundation::trackModeChanged()
 
 size_t MediaPlayerPrivateAVFoundation::extraMemoryCost() const
 {
-    double duration = durationDouble();
+    MediaTime duration = this->durationMediaTime();
     if (!duration)
         return 0;
 
-    unsigned long long extra = totalBytes() * buffered()->totalDuration().toDouble() / duration;
+    unsigned long long extra = totalBytes() * buffered()->totalDuration().toDouble() / duration.toDouble();
     return static_cast<unsigned>(extra);
 }
 
index 8e9b54c..660b0c0 100644 (file)
@@ -53,7 +53,7 @@ public:
     virtual void rateChanged();
     virtual void loadedTimeRangesChanged();
     virtual void seekableTimeRangesChanged();
-    virtual void timeChanged(double);
+    virtual void timeChanged(const MediaTime&);
     virtual void seekCompleted(bool);
     virtual void didEnd();
     virtual void contentsNeedsDisplay() { }
@@ -96,12 +96,11 @@ public:
         
         Notification()
             : m_type(None)
-            , m_time(0)
             , m_finished(false)
         {
         }
 
-        Notification(Type type, double time)
+        Notification(Type type, const MediaTime& time)
             : m_type(type)
             , m_time(time)
             , m_finished(false)
@@ -110,14 +109,12 @@ public:
         
         Notification(Type type, bool finished)
             : m_type(type)
-            , m_time(0)
             , m_finished(finished)
         {
         }
 
         Notification(std::function<void ()> function)
             : m_type(FunctionType)
-            , m_time(0)
             , m_finished(false)
             , m_function(function)
         {
@@ -125,19 +122,19 @@ public:
         
         Type type() { return m_type; }
         bool isValid() { return m_type != None; }
-        double time() { return m_time; }
+        MediaTime time() { return m_time; }
         bool finished() { return m_finished; }
         std::function<void ()>& function() { return m_function; }
         
     private:
         Type m_type;
-        double m_time;
+        MediaTime m_time;
         bool m_finished;
         std::function<void ()> m_function;
     };
 
     void scheduleMainThreadNotification(Notification);
-    void scheduleMainThreadNotification(Notification::Type, double time = 0);
+    void scheduleMainThreadNotification(Notification::Type, const MediaTime& = MediaTime::zeroTime());
     void scheduleMainThreadNotification(Notification::Type, bool completed);
     void dispatchNotification();
     void clearMainThreadPendingFlag();
@@ -169,12 +166,10 @@ protected:
     virtual bool hasVideo() const override { return m_cachedHasVideo; }
     virtual bool hasAudio() const override { return m_cachedHasAudio; }
     virtual void setVisible(bool) override;
-    virtual float duration() const override;
-    virtual double durationDouble() const override;
-    virtual float currentTime() const override;
-    virtual double currentTimeDouble() const = 0;
-    virtual void seek(float) override;
-    virtual void seekWithTolerance(double, double, double) override;
+    virtual MediaTime durationMediaTime() const override;
+    virtual MediaTime currentMediaTime() const = 0;
+    virtual void seek(const MediaTime&) override;
+    virtual void seekWithTolerance(const MediaTime&, const MediaTime&, const MediaTime&) override;
     virtual bool seeking() const override;
     virtual void setRate(float) override;
     virtual bool paused() const override;
@@ -183,8 +178,8 @@ protected:
     virtual void setClosedCaptionsVisible(bool) = 0;
     virtual MediaPlayer::NetworkState networkState() const override { return m_networkState; }
     virtual MediaPlayer::ReadyState readyState() const override { return m_readyState; }
-    virtual double maxTimeSeekableDouble() const override;
-    virtual double minTimeSeekable() const override;
+    virtual MediaTime maxMediaTimeSeekable() const override;
+    virtual MediaTime minMediaTimeSeekable() const override;
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const override;
     virtual bool didLoadingProgress() const override;
     virtual void setSize(const IntSize&) override;
@@ -199,7 +194,6 @@ protected:
 
     virtual MediaPlayer::MovieLoadType movieLoadType() const;
     virtual void prepareForRendering();
-    virtual float mediaTimeForTimeValue(float) const = 0;
 
     virtual bool supportsFullscreen() const;
     virtual bool supportsScanning() const { return true; }
@@ -238,13 +232,13 @@ protected:
     virtual void checkPlayability() = 0;
     virtual void updateRate() = 0;
     virtual float rate() const = 0;
-    virtual void seekToTime(double time, double negativeTolerance, double positiveTolerance) = 0;
+    virtual void seekToTime(const MediaTime&, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance) = 0;
     virtual unsigned long long totalBytes() const = 0;
     virtual std::unique_ptr<PlatformTimeRanges> platformBufferedTimeRanges() const = 0;
-    virtual double platformMaxTimeSeekable() const = 0;
-    virtual double platformMinTimeSeekable() const = 0;
-    virtual float platformMaxTimeLoaded() const = 0;
-    virtual double platformDuration() const = 0;
+    virtual MediaTime platformMaxTimeSeekable() const = 0;
+    virtual MediaTime platformMinTimeSeekable() const = 0;
+    virtual MediaTime platformMaxTimeLoaded() const = 0;
+    virtual MediaTime platformDuration() const = 0;
 
     virtual void beginLoadingMetadata() = 0;
     virtual void tracksChanged() = 0;
@@ -282,7 +276,7 @@ protected:
 
     bool metaDataAvailable() const { return m_readyState >= MediaPlayer::HaveMetadata; }
     float requestedRate() const { return m_requestedRate; }
-    float maxTimeLoaded() const;
+    MediaTime maxTimeLoaded() const;
     bool isReadyForVideoSetup() const;
     virtual void setUpVideoRendering();
     virtual void tearDownVideoRendering();
@@ -328,12 +322,12 @@ private:
     MediaPlayer::Preload m_preload;
 
     IntSize m_cachedNaturalSize;
-    mutable float m_cachedMaxTimeLoaded;
-    mutable double m_cachedMaxTimeSeekable;
-    mutable double m_cachedMinTimeSeekable;
-    mutable double m_cachedDuration;
-    float m_reportedDuration;
-    mutable float m_maxTimeLoadedAtLastDidLoadingProgress;
+    mutable MediaTime m_cachedMaxTimeLoaded;
+    mutable MediaTime m_cachedMaxTimeSeekable;
+    mutable MediaTime m_cachedMinTimeSeekable;
+    mutable MediaTime m_cachedDuration;
+    MediaTime m_reportedDuration;
+    mutable MediaTime m_maxTimeLoadedAtLastDidLoadingProgress;
     float m_requestedRate;
     mutable int m_delayCallbacks;
     int m_delayCharacteristicsChangedNotification;
  */
 
 #include "config.h"
-#include "MediaTimeMac.h"
+#include "MediaTimeAVFoundation.h"
 
-#if USE(COREMEDIA)
+#if USE(AVFOUNDATION)
 
 namespace WebCore {
 
+static bool CMTimeHasFlags(const CMTime& cmTime, uint32_t flags)
+{
+    return (cmTime.flags & flags) == flags;
+}
+
 MediaTime toMediaTime(const CMTime& cmTime)
 {
     uint32_t flags = 0;
-    if (CMTIME_IS_VALID(cmTime))
+    if (CMTimeHasFlags(cmTime, kCMTimeFlags_Valid))
         flags |= MediaTime::Valid;
-    if (CMTIME_HAS_BEEN_ROUNDED(cmTime))
+    if (CMTimeHasFlags(cmTime, kCMTimeFlags_Valid | kCMTimeFlags_HasBeenRounded))
         flags |= MediaTime::HasBeenRounded;
-    if (CMTIME_IS_POSITIVE_INFINITY(cmTime))
+    if (CMTimeHasFlags(cmTime, kCMTimeFlags_Valid | kCMTimeFlags_PositiveInfinity))
         flags |= MediaTime::PositiveInfinite;
-    if (CMTIME_IS_NEGATIVE_INFINITY(cmTime))
+    if (CMTimeHasFlags(cmTime, kCMTimeFlags_Valid | kCMTimeFlags_NegativeInfinity))
         flags |= MediaTime::NegativeInfinite;
-    if (CMTIME_IS_INDEFINITE(cmTime))
+    if (CMTimeHasFlags(cmTime, kCMTimeFlags_Valid | kCMTimeFlags_Indefinite))
         flags |= MediaTime::Indefinite;
 
     return MediaTime(cmTime.value, cmTime.timescale, flags);
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  */
 
-#ifndef MediaTimeMac_h
-#define MediaTimeMac_h
+#ifndef MediaTimeAVFoundation_h
+#define MediaTimeAVFoundation_h
 
-#if USE(COREMEDIA)
+#if USE(AVFOUNDATION)
 
 #include <CoreMedia/CMTime.h>
 #include <wtf/MediaTime.h>
index cdbdf07..5aac47d 100644 (file)
@@ -42,6 +42,7 @@
 #else
 #include "InbandTextTrackPrivateLegacyAVCF.h"
 #endif
+#include "MediaTimeAVFoundation.h"
 #include "URL.h"
 #include "Logging.h"
 #include "PlatformCALayerWin.h"
@@ -106,7 +107,7 @@ public:
 
     void createImageGenerator();
     void destroyImageGenerator();
-    RetainPtr<CGImageRef> createImageForTimeInRect(float, const IntRect&);
+    RetainPtr<CGImageRef> createImageForTimeInRect(const MediaTime&, const IntRect&);
 
     void createAssetForURL(const String& url, bool inheritURI);
     void setAsset(AVCFURLAssetRef);
@@ -117,7 +118,7 @@ public:
     void checkPlayability();
     void beginLoadingMetadata();
     
-    void seekToTime(double, double, double);
+    void seekToTime(const MediaTime&, const MediaTime&, const MediaTime&);
     void updateVideoLayerGravity();
 
     void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
@@ -569,10 +570,10 @@ void MediaPlayerPrivateAVFoundationCF::platformPause()
     setDelayCallbacks(false);
 }
 
-double MediaPlayerPrivateAVFoundationCF::platformDuration() const
+MediaTime MediaPlayerPrivateAVFoundationCF::platformDuration() const
 {
     if (!metaDataAvailable() || !avAsset(m_avfWrapper))
-        return 0;
+        return MediaTime::zeroTime();
 
     CMTime cmDuration;
 
@@ -583,28 +584,28 @@ double MediaPlayerPrivateAVFoundationCF::platformDuration() const
         cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));
 
     if (CMTIME_IS_NUMERIC(cmDuration))
-        return CMTimeGetSeconds(cmDuration);
+        return toMediaTime(cmDuration);
 
     if (CMTIME_IS_INDEFINITE(cmDuration))
-        return numeric_limits<double>::infinity();
+        return MediaTime::positiveInfiniteTime();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %.0f", this, static_cast<float>(MediaPlayer::invalidTime()));
-    return static_cast<float>(MediaPlayer::invalidTime());
+    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
+    return MediaTime::invalidTime();
 }
 
-double MediaPlayerPrivateAVFoundationCF::currentTimeDouble() const
+MediaTime MediaPlayerPrivateAVFoundationCF::currentMediaTime() const
 {
     if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
-        return 0;
+        return MediaTime::zeroTime();
 
     CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
     if (CMTIME_IS_NUMERIC(itemTime))
-        return std::max(CMTimeGetSeconds(itemTime), 0.0);
+        return max(toMediaTime(itemTime), MediaTime::zeroTime());
 
-    return 0;
+    return MediaTime::zeroTime();
 }
 
-void MediaPlayerPrivateAVFoundationCF::seekToTime(double time, double negativeTolerance, double positiveTolerance)
+void MediaPlayerPrivateAVFoundationCF::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
     if (!m_avfWrapper)
         return;
@@ -685,22 +686,22 @@ std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBu
         CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
         
         if (timeRangeIsValidAndNotEmpty(start, duration)) {
-            double rangeStart = CMTimeGetSeconds(start);
-            double rangeEnd = rangeStart + CMTimeGetSeconds(duration);
-            timeRanges->add(MediaTime::createWithDouble(rangeStart), MediaTime::createWithDouble(rangeEnd));
+            MediaTime rangeStart = toMediaTime(start);
+            MediaTime rangeEnd = rangeStart + toMediaTime(duration);
+            timeRanges->add(rangeStart, rangeEnd);
         }
     }
 
     return timeRanges;
 }
 
-double MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
+MediaTime MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 
 { 
     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
     if (!seekableRanges) 
-        return 0; 
+        return MediaTime::zeroTime(); 
 
-    double minTimeSeekable = std::numeric_limits<double>::infinity(); 
+    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
     bool hasValidRange = false; 
     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
     for (CFIndex i = 0; i < rangeCount; i++) {
@@ -711,23 +712,23 @@ double MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const
             continue;
 
         hasValidRange = true; 
-        double startOfRange = CMTimeGetSeconds(start); 
+        MediaTime startOfRange = toMediaTime(start); 
         if (minTimeSeekable > startOfRange) 
             minTimeSeekable = startOfRange; 
     } 
-    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
+    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime(); 
 } 
 
-double MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
+MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
 {
     if (!avPlayerItem(m_avfWrapper))
-        return 0;
+        return MediaTime::zeroTime();
 
     RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
     if (!seekableRanges)
-        return 0;
+        return MediaTime::zeroTime();
 
-    double maxTimeSeekable = 0;
+    MediaTime maxTimeSeekable;
     CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
     for (CFIndex i = 0; i < rangeCount; i++) {
         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
@@ -736,7 +737,7 @@ double MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
         if (!timeRangeIsValidAndNotEmpty(start, duration))
             continue;
         
-        double endOfRange = CMTimeGetSeconds(CMTimeAdd(start, duration));
+        MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
         if (maxTimeSeekable < endOfRange)
             maxTimeSeekable = endOfRange;
     }
@@ -744,16 +745,16 @@ double MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
     return maxTimeSeekable;   
 }
 
-float MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
+MediaTime MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
 {
     if (!avPlayerItem(m_avfWrapper))
-        return 0;
+        return MediaTime::zeroTime();
 
     RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
     if (!loadedRanges)
-        return 0;
+        return MediaTime::zeroTime();
 
-    float maxTimeLoaded = 0;
+    MediaTime maxTimeLoaded;
     CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
     for (CFIndex i = 0; i < rangeCount; i++) {
         CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
@@ -762,7 +763,7 @@ float MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
         if (!timeRangeIsValidAndNotEmpty(start, duration))
             continue;
         
-        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeAdd(start, duration)));
+        MediaTime endOfRange = toMediaTime(CMTimeAdd(start, duration));
         if (maxTimeLoaded < endOfRange)
             maxTimeLoaded = endOfRange;
     }
@@ -847,7 +848,7 @@ void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const Int
     LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);
 
     setDelayCallbacks(true);
-    RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentTime(), rect);
+    RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentMediaTime(), rect);
     if (image) {
         context->save();
         context->translate(rect.x(), rect.y() + rect.height());
@@ -930,7 +931,7 @@ void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLo
 }
 #endif
 
-float MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(float timeValue) const
+MediaTime MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(const MediaTime& timeValue) const
 {
     if (!metaDataAvailable())
         return timeValue;
@@ -1692,12 +1693,12 @@ void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void
     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
 }
 
-void AVFWrapper::seekToTime(double time, double negativeTolerance, double positiveTolerance)
+void AVFWrapper::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
     ASSERT(avPlayerItem());
-    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
-    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
-    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);
+    CMTime cmTime = toCMTime(time);
+    CMTime cmBefore = toCMTime(negativeTolerance);
+    CMTime cmAfter = toCMTime(positiveTolerance);
     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
 }
 
@@ -1705,10 +1706,10 @@ void AVFWrapper::seekToTime(double time, double negativeTolerance, double positi
 struct LegibleOutputData {
     RetainPtr<CFArrayRef> m_attributedStrings;
     RetainPtr<CFArrayRef> m_samples;
-    double m_time;
+    MediaTime m_time;
     void* m_context;
 
-    LegibleOutputData(CFArrayRef strings, CFArrayRef samples, double time, void* context)
+    LegibleOutputData(CFArrayRef strings, CFArrayRef samples, const MediaTime &time, void* context)
         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
     {
     }
@@ -1751,7 +1752,7 @@ void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutpu
 
     ASSERT(legibleOutput == self->m_legibleOutput);
 
-    auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, CMTimeGetSeconds(itemTime), context);
+    auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, toMediaTime(itemTime), context);
 
     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
 }
@@ -1948,7 +1949,7 @@ void AVFWrapper::destroyImageGenerator()
     m_imageGenerator = 0;
 }
 
-RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(float time, const IntRect& rect)
+RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(const MediaTime& time, const IntRect& rect)
 {
     if (!m_imageGenerator)
         return 0;
@@ -1958,7 +1959,7 @@ RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(float time, const Int
 #endif
 
     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
-    RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), CMTimeMakeWithSeconds(time, 600), 0, 0));
+    RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), toCMTime(time), 0, 0));
     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawimage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
 
 #if !LOG_DISABLED
index 97cb0e9..7fcc6a8 100644 (file)
@@ -73,14 +73,14 @@ private:
     virtual void platformSetVisible(bool);
     virtual void platformPlay();
     virtual void platformPause();
-    virtual double currentTimeDouble() const override;
+    virtual MediaTime currentMediaTime() const override;
     virtual void setVolume(float);
     virtual void setClosedCaptionsVisible(bool);
     virtual void paint(GraphicsContext*, const IntRect&);
     virtual void paintCurrentFrameInContext(GraphicsContext*, const IntRect&);
     virtual PlatformLayer* platformLayer() const;
     virtual bool supportsAcceleratedRendering() const { return true; }
-    virtual float mediaTimeForTimeValue(float) const;
+    virtual MediaTime mediaTimeForTimeValue(const MediaTime&) const;
 
     virtual void createAVPlayer();
     virtual void createAVPlayerItem();
@@ -91,13 +91,13 @@ private:
     virtual void checkPlayability();
     virtual void updateRate();
     virtual float rate() const;
-    virtual void seekToTime(double time, double negativeTolerance, double positiveTolerance);
+    virtual void seekToTime(const MediaTime&, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance);
     virtual unsigned long long totalBytes() const;
     virtual std::unique_ptr<PlatformTimeRanges> platformBufferedTimeRanges() const;
-    virtual double platformMinTimeSeekable() const;
-    virtual double platformMaxTimeSeekable() const;
-    virtual double platformDuration() const;
-    virtual float platformMaxTimeLoaded() const;
+    virtual MediaTime platformMinTimeSeekable() const;
+    virtual MediaTime platformMaxTimeSeekable() const;
+    virtual MediaTime platformDuration() const;
+    virtual MediaTime platformMaxTimeLoaded() const;
     virtual void beginLoadingMetadata();
     virtual void sizeChanged();
     virtual bool requiresImmediateCompositing() const override;
index ff5fe01..3e2c1b9 100644 (file)
@@ -76,7 +76,7 @@ public:
 
 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
     RetainPtr<AVPlayerItem> playerItem() const { return m_avPlayerItem; }
-    void processCue(NSArray *, NSArray *, double);
+    void processCue(NSArray *, NSArray *, const MediaTime&);
     void flushCues();
 #endif
     
@@ -103,9 +103,9 @@ public:
     void tracksDidChange(RetainPtr<NSArray>);
     void hasEnabledAudioDidChange(bool);
     void presentationSizeDidChange(FloatSize);
-    void durationDidChange(double);
+    void durationDidChange(const MediaTime&);
     void rateDidChange(double);
-    void metadataDidArrive(RetainPtr<NSArray>, double);
+    void metadataDidArrive(RetainPtr<NSArray>, const MediaTime&);
     void firstFrameAvailableDidChange(bool);
     void trackEnabledDidChange(bool);
     void canPlayFastReverseDidChange(bool);
@@ -146,7 +146,7 @@ private:
     virtual void platformSetVisible(bool);
     virtual void platformPlay();
     virtual void platformPause();
-    virtual double currentTimeDouble() const override;
+    virtual MediaTime currentMediaTime() const override;
     virtual void setVolume(float);
     virtual void setClosedCaptionsVisible(bool);
     virtual void paint(GraphicsContext*, const IntRect&);
@@ -163,7 +163,7 @@ private:
 #endif
 
     virtual bool supportsAcceleratedRendering() const { return true; }
-    virtual float mediaTimeForTimeValue(float) const;
+    virtual MediaTime mediaTimeForTimeValue(const MediaTime&) const;
     virtual double maximumDurationToCacheMediaTime() const { return 5; }
 
     virtual void createAVPlayer();
@@ -176,13 +176,13 @@ private:
     virtual void checkPlayability();
     virtual void updateRate();
     virtual float rate() const;
-    virtual void seekToTime(double time, double negativeTolerance, double positiveTolerance);
+    virtual void seekToTime(const MediaTime&, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance);
     virtual unsigned long long totalBytes() const;
     virtual std::unique_ptr<PlatformTimeRanges> platformBufferedTimeRanges() const;
-    virtual double platformMinTimeSeekable() const;
-    virtual double platformMaxTimeSeekable() const;
-    virtual double platformDuration() const;
-    virtual float platformMaxTimeLoaded() const;
+    virtual MediaTime platformMinTimeSeekable() const;
+    virtual MediaTime platformMaxTimeSeekable() const;
+    virtual MediaTime platformDuration() const;
+    virtual MediaTime platformMaxTimeLoaded() const;
     virtual void beginLoadingMetadata();
     virtual void sizeChanged();
 
@@ -329,7 +329,7 @@ private:
     RetainPtr<NSArray> m_cachedTracks;
     RetainPtr<NSArray> m_currentMetaData;
     FloatSize m_cachedPresentationSize;
-    double m_cachedDuration;
+    MediaTime m_cachedDuration;
     double m_cachedRate;
     mutable long long m_cachedTotalBytes;
     unsigned m_pendingStatusChanges;
index 8f08211..44decb6 100644 (file)
@@ -46,7 +46,7 @@
 #import "OutOfBandTextTrackPrivateAVF.h"
 #import "URL.h"
 #import "Logging.h"
-#import "MediaTimeMac.h"
+#import "MediaTimeAVFoundation.h"
 #import "PlatformTimeRanges.h"
 #import "SecurityOrigin.h"
 #import "SerializedPlatformRepresentationMac.h"
@@ -399,7 +399,6 @@ MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlay
     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
 #endif
     , m_currentTextTrack(0)
-    , m_cachedDuration(MediaPlayer::invalidTime())
     , m_cachedRate(0)
     , m_cachedTotalBytes(0)
     , m_pendingStatusChanges(0)
@@ -494,7 +493,7 @@ void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
     m_cachedLoadedRanges = nullptr;
     m_cachedHasEnabledAudio = false;
     m_cachedPresentationSize = FloatSize();
-    m_cachedDuration = 0;
+    m_cachedDuration = MediaTime::zeroTime();
 
     for (AVPlayerItemTrack *track in m_cachedTracks.get())
         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
@@ -1130,12 +1129,12 @@ void MediaPlayerPrivateAVFoundationObjC::platformPause()
     setDelayCallbacks(false);
 }
 
-double MediaPlayerPrivateAVFoundationObjC::platformDuration() const
+MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
 {
     // Do not ask the asset for duration before it has been loaded or it will fetch the
     // answer synchronously.
     if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
-        return MediaPlayer::invalidTime();
+        return MediaTime::invalidTime();
     
     CMTime cmDuration;
     
@@ -1143,32 +1142,31 @@ double MediaPlayerPrivateAVFoundationObjC::platformDuration() const
     if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
         cmDuration = [m_avPlayerItem.get() duration];
     else
-        cmDuration= [m_avAsset.get() duration];
+        cmDuration = [m_avAsset.get() duration];
 
     if (CMTIME_IS_NUMERIC(cmDuration))
-        return CMTimeGetSeconds(cmDuration);
+        return toMediaTime(cmDuration);
 
-    if (CMTIME_IS_INDEFINITE(cmDuration)) {
-        return std::numeric_limits<double>::infinity();
-    }
+    if (CMTIME_IS_INDEFINITE(cmDuration))
+        return MediaTime::positiveInfiniteTime();
 
-    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
-    return MediaPlayer::invalidTime();
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
+    return MediaTime::invalidTime();
 }
 
-double MediaPlayerPrivateAVFoundationObjC::currentTimeDouble() const
+MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
 {
     if (!metaDataAvailable() || !m_avPlayerItem)
-        return 0;
+        return MediaTime::zeroTime();
 
     CMTime itemTime = [m_avPlayerItem.get() currentTime];
     if (CMTIME_IS_NUMERIC(itemTime))
-        return std::max(CMTimeGetSeconds(itemTime), 0.0);
+        return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
 
-    return 0;
+    return MediaTime::zeroTime();
 }
 
-void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
+void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
     // setCurrentTime generates several event callbacks, update afterwards.
     setDelayCallbacks(true);
@@ -1176,9 +1174,9 @@ void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negative
     if (m_metadataTrack)
         m_metadataTrack->flushPartialCues();
 
-    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
-    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
-    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);
+    CMTime cmTime = toCMTime(time);
+    CMTime cmBefore = toCMTime(negativeTolerance);
+    CMTime cmAfter = toCMTime(positiveTolerance);
 
     auto weakThis = createWeakPtr();
 
@@ -1249,12 +1247,12 @@ std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platform
     return timeRanges;
 }
 
-double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
+MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
 {
     if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
-        return 0;
+        return MediaTime::zeroTime();
 
-    double minTimeSeekable = std::numeric_limits<double>::infinity();
+    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
     bool hasValidRange = false;
     for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
         CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
@@ -1262,32 +1260,32 @@ double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
             continue;
 
         hasValidRange = true;
-        double startOfRange = CMTimeGetSeconds(timeRange.start);
+        MediaTime startOfRange = toMediaTime(timeRange.start);
         if (minTimeSeekable > startOfRange)
             minTimeSeekable = startOfRange;
     }
-    return hasValidRange ? minTimeSeekable : 0;
+    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
 }
 
-double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
+MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
 {
     if (!m_cachedSeekableRanges)
         m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];
 
-    double maxTimeSeekable = 0;
+    MediaTime maxTimeSeekable;
     for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
         CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
         if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
             continue;
         
-        double endOfRange = CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange));
+        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
         if (maxTimeSeekable < endOfRange)
             maxTimeSeekable = endOfRange;
     }
     return maxTimeSeekable;
 }
 
-float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
+MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
 {
 #if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
     // AVFoundation on Mountain Lion will occasionally not send a KVO notification
@@ -1298,15 +1296,15 @@ float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
 #endif
 
     if (!m_cachedLoadedRanges)
-        return 0;
+        return MediaTime::zeroTime();
 
-    float maxTimeLoaded = 0;
+    MediaTime maxTimeLoaded;
     for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
         CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
         if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
             continue;
         
-        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
+        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
         if (maxTimeLoaded < endOfRange)
             maxTimeLoaded = endOfRange;
     }
@@ -1592,7 +1590,7 @@ bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
     return AVFoundationLibrary() && CoreMediaLibrary();
 }
 
-float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
+MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
 {
     if (!metaDataAvailable())
         return timeValue;
@@ -2249,7 +2247,7 @@ void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
     player()->addTextTrack(m_metadataTrack);
 }
 
-void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, double time)
+void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
 {
     if (!m_currentTextTrack)
         return;
@@ -2526,11 +2524,11 @@ static const AtomicString& metadataType(NSString *avMetadataKeySpace)
 }
 #endif
 
-void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, double mediaTime)
+void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
 {
     m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;
 
-    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %.2f", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, mediaTime);
+    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());
 
 #if ENABLE(DATACUE_VALUE)
     if (seeking())
@@ -2545,19 +2543,19 @@ void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> me
         processMetadataTrack();
 
     // Set the duration of all incomplete cues before adding new ones.
-    double earliesStartTime = std::numeric_limits<double>::infinity();
+    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
     for (AVMetadataItemType *item in m_currentMetaData.get()) {
-        double start = CMTimeGetSeconds(item.time);
-        if (start < earliesStartTime)
-            earliesStartTime = start;
+        MediaTime start = toMediaTime(item.time);
+        if (start < earliestStartTime)
+            earliestStartTime = start;
     }
-    m_metadataTrack->updatePendingCueEndTimes(earliesStartTime);
+    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);
 
     for (AVMetadataItemType *item in m_currentMetaData.get()) {
-        double start = CMTimeGetSeconds(item.time);
-        double end = std::numeric_limits<double>::infinity();
+        MediaTime start = toMediaTime(item.time);
+        MediaTime end = MediaTime::positiveInfiniteTime();
         if (CMTIME_IS_VALID(item.duration))
-            end = start + CMTimeGetSeconds(item.duration);
+            end = start + toMediaTime(item.duration);
 
         AtomicString type = nullAtom;
         if (item.keySpace)
@@ -2599,7 +2597,7 @@ void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize siz
     updateStates();
 }
 
-void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
+void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
 {
     m_cachedDuration = duration;
 
@@ -2768,12 +2766,12 @@ NSArray* assetTrackMetadataKeyNames()
         else if ([keyPath isEqualToString:@"presentationSize"])
             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
         else if ([keyPath isEqualToString:@"duration"])
-            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
+            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
         else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
-            double now = 0;
+            MediaTime now;
             CMTime itemTime = [(AVPlayerItemType *)object currentTime];
             if (CMTIME_IS_NUMERIC(itemTime))
-                now = std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
+                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
         } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
@@ -2820,7 +2818,7 @@ NSArray* assetTrackMetadataKeyNames()
         MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
         if (!callback)
             return;
-        callback->processCue(strongStrings.get(), strongSamples.get(), CMTimeGetSeconds(itemTime));
+        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
     });
 }
 
index bcdfedf..0793469 100644 (file)
@@ -119,18 +119,18 @@ private:
 
     virtual void setVisible(bool) override;
 
-    virtual double durationDouble() const override;
-    virtual double currentTimeDouble() const override;
-    virtual double startTimeDouble() const override;
-    virtual double initialTime() const override;
+    virtual MediaTime durationMediaTime() const override;
+    virtual MediaTime currentMediaTime() const override;
+    virtual MediaTime startTime() const override;
+    virtual MediaTime initialTime() const override;
 
-    virtual void seekWithTolerance(double time, double negativeThreshold, double positiveThreshold) override;
+    virtual void seekWithTolerance(const MediaTime&, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold) override;
     virtual bool seeking() const override;
     virtual void setRateDouble(double) override;
 
     virtual std::unique_ptr<PlatformTimeRanges> seekable() const override;
-    virtual double maxTimeSeekableDouble() const override;
-    virtual double minTimeSeekable() const override;
+    virtual MediaTime maxMediaTimeSeekable() const override;
+    virtual MediaTime minMediaTimeSeekable() const override;
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const override;
 
     virtual bool didLoadingProgress() const override;
@@ -159,9 +159,7 @@ private:
     virtual unsigned long totalVideoFrames() override;
     virtual unsigned long droppedVideoFrames() override;
     virtual unsigned long corruptedVideoFrames() override;
-    virtual double totalFrameDelay() override;
-
-    MediaTime currentMediaTime() const;
+    virtual MediaTime totalFrameDelay() override;
 
     void ensureLayer();
     void destroyLayer();
index 928420a..db6659f 100644 (file)
@@ -32,7 +32,7 @@
 #import "Logging.h"
 #import "MediaSourcePrivateAVFObjC.h"
 #import "MediaSourcePrivateClient.h"
-#import "MediaTimeMac.h"
+#import "MediaTimeAVFoundation.h"
 #import "PlatformClockCM.h"
 #import "SoftLinking.h"
 #import "WebCoreSystemInterface.h"
@@ -377,9 +377,9 @@ void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool)
     // No-op.
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::durationDouble() const
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
 {
-    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration().toDouble() : 0;
+    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : MediaTime::zeroTime();
 }
 
 MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
@@ -392,27 +392,22 @@ MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
     return synchronizerTime;
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble() const
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
 {
-    return currentMediaTime().toDouble();
+    return MediaTime::zeroTime();
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble() const
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
 {
-    return 0;
-}
-
-double MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
-{
-    return 0;
+    return MediaTime::zeroTime();
 }
 
-void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(double time, double negativeThreshold, double positiveThreshold)
+void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
 {
     LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data());
     m_seeking = true;
     auto weakThis = createWeakPtr();
-    m_pendingSeek = std::make_unique<PendingSeek>(MediaTime::createWithDouble(time), MediaTime::createWithDouble(negativeThreshold), MediaTime::createWithDouble(positiveThreshold));
+    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);
 
     if (m_seekTimer.isActive())
         m_seekTimer.stop();
@@ -435,7 +430,7 @@ void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
     if (!m_mediaSourcePrivate)
         return;
 
-    if (pendingSeek->negativeThreshold == MediaTime::zeroTime() && pendingSeek->positiveThreshold == MediaTime::zeroTime())
+    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
         m_lastSeekTime = pendingSeek->targetTime;
     else
         m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);
@@ -490,17 +485,17 @@ MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
 
 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
 {
-    return PlatformTimeRanges::create(MediaTime::createWithDouble(minTimeSeekable()), MediaTime::createWithDouble(maxTimeSeekableDouble()));
+    return PlatformTimeRanges::create(minMediaTimeSeekable(), maxMediaTimeSeekable());
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::maxTimeSeekableDouble() const
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
 {
-    return durationDouble();
+    return durationMediaTime();
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::minTimeSeekable() const
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
 {
-    return startTimeDouble();
+    return startTime();
 }
 
 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
@@ -590,9 +585,9 @@ unsigned long MediaPlayerPrivateMediaSourceAVFObjC::corruptedVideoFrames()
     return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfCorruptedVideoFrames];
 }
 
-double MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay()
+MediaTime MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay()
 {
-    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalFrameDelay];
+    return MediaTime::createWithDouble([[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalFrameDelay]);
 }
 
 #pragma mark -
index 792ed98..ee36f8b 100644 (file)
@@ -94,7 +94,7 @@ void MediaSourcePrivateAVFObjC::removeSourceBuffer(SourceBufferPrivate* buffer)
 
 MediaTime MediaSourcePrivateAVFObjC::duration()
 {
-    return MediaTime::createWithDouble(m_client->duration());
+    return m_client->duration();
 }
 
 std::unique_ptr<PlatformTimeRanges> MediaSourcePrivateAVFObjC::buffered()
index cba0176..67f838d 100644 (file)
@@ -41,7 +41,7 @@ public:
         return adoptRef(new OutOfBandTextTrackPrivateAVF(player, selection));
     }
     
-    virtual void processCue(CFArrayRef, CFArrayRef, double) override { }
+    virtual void processCue(CFArrayRef, CFArrayRef, const MediaTime&) override { }
     virtual void resetCueValues() override { }
     
     virtual Category textTrackCategory() const override { return OutOfBand; }
index 0bde8a2..9aa8d43 100644 (file)
@@ -34,7 +34,7 @@
 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
 #import "MediaSample.h"
 #import "MediaSourcePrivateAVFObjC.h"
-#import "MediaTimeMac.h"
+#import "MediaTimeAVFoundation.h"
 #import "NotImplemented.h"
 #import "SoftLinking.h"
 #import "SourceBufferPrivateClient.h"
index e1f6e32..7430e3f 100644 (file)
@@ -47,7 +47,7 @@ public:
     virtual AtomicString inBandMetadataTrackDispatchType() const override { return m_inBandMetadataTrackDispatchType; }
     void setInBandMetadataTrackDispatchType(const AtomicString& value) { m_inBandMetadataTrackDispatchType = value; }
 
-    void addDataCue(double start, double end, const void* data, unsigned length)
+    void addDataCue(const MediaTime& start, const MediaTime& end, const void* data, unsigned length)
     {
         ASSERT(cueFormat() == Data);
         client()->addDataCue(this, start, end, data, length);
index c687370..02b5a59 100644 (file)
@@ -39,6 +39,7 @@
 #include <gst/pbutils/missing-plugins.h>
 #include <limits>
 #include <wtf/HexNumber.h>
+#include <wtf/MediaTime.h>
 #include <wtf/gobject/GUniquePtr.h>
 #include <wtf/text/CString.h>
 
@@ -1091,7 +1092,7 @@ void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section
         gsize size;
         const void* bytes = g_bytes_get_data(data.get(), &size);
 
-        track->addDataCue(currentTimeDouble(), currentTimeDouble(), bytes, size);
+        track->addDataCue(MediaTime::createWithDouble(currentTimeDouble()), MediaTime::createWithDouble(currentTimeDouble()), bytes, size);
     }
 }
 #endif
@@ -1124,9 +1125,9 @@ void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry
     gint64 start = -1, stop = -1;
     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
     if (start != -1)
-        cue->setStartTime(static_cast<double>(start) / GST_SECOND);
+        cue->setStartTime(MediaTime(start, GST_SECOND));
     if (stop != -1)
-        cue->setEndTime(static_cast<double>(stop) / GST_SECOND);
+        cue->setEndTime(MediaTime(stop, GST_SECOND));
 
     GstTagList* tags = gst_toc_entry_get_tags(entry);
     if (tags) {
index d36fb60..623ee06 100644 (file)
@@ -66,7 +66,7 @@ MediaSourceGStreamer::AddStatus MediaSourceGStreamer::addSourceBuffer(const Cont
 
 void MediaSourceGStreamer::durationChanged()
 {
-    m_client->didReceiveDuration(m_mediaSource->duration());
+    m_client->didReceiveDuration(m_mediaSource->duration().toDouble());
 }
 
 void MediaSourceGStreamer::markEndOfStream(EndOfStreamStatus)
index aee896f..92abfa7 100644 (file)
@@ -104,9 +104,9 @@ private:
     bool paused() const;
     bool seeking() const;
     
-    float duration() const;
-    float currentTime() const;
-    void seek(float time);
+    virtual MediaTime durationMediaTime() const override;
+    virtual MediaTime currentMediaTime() const override;
+    virtual void seek(const MediaTime&) override;
     
     void setRate(float);
     void setVolume(float);
@@ -121,7 +121,7 @@ private:
     MediaPlayer::ReadyState readyState() const { return m_readyState; }
     
     std::unique_ptr<PlatformTimeRanges> buffered() const;
-    float maxTimeSeekable() const;
+    MediaTime maxMediaTimeSeekable() const;
     bool didLoadingProgress() const;
     unsigned totalBytes() const;
     
@@ -161,13 +161,11 @@ private:
     void createQTMovieLayer();
     void destroyQTMovieLayer();
 
-    QTTime createQTTime(float time) const;
-    
     void updateStates();
     void doSeek();
     void cancelSeek();
     void seekTimerFired(Timer<MediaPlayerPrivateQTKit>&);
-    float maxTimeLoaded() const;
+    MediaTime maxMediaTimeLoaded() const;
     void disableUnsupportedTracks();
     
     void sawUnsupportedTracks();
@@ -175,8 +173,6 @@ private:
     bool metaDataAvailable() const { return m_qtMovie && m_readyState >= MediaPlayer::HaveMetadata; }
 
     bool isReadyForVideoSetup() const;
-    
-    virtual float mediaTimeForTimeValue(float) const;
 
     virtual double maximumDurationToCacheMediaTime() const { return 5; }
 
@@ -192,7 +188,7 @@ private:
     RetainPtr<QTVideoRendererWebKitOnly> m_qtVideoRenderer;
     RetainPtr<WebCoreMovieObserver> m_objcObserver;
     String m_movieURL;
-    float m_seekTo;
+    MediaTime m_seekTo;
     Timer<MediaPlayerPrivateQTKit> m_seekTimer;
     MediaPlayer::NetworkState m_networkState;
     MediaPlayer::ReadyState m_readyState;
@@ -200,9 +196,9 @@ private:
     FloatSize m_scaleFactor;
     unsigned m_enabledTrackCount;
     unsigned m_totalTrackCount;
-    float m_reportedDuration;
-    float m_cachedDuration;
-    float m_timeToRestore;
+    MediaTime m_reportedDuration;
+    MediaTime m_cachedDuration;
+    MediaTime m_timeToRestore;
     RetainPtr<QTMovieLayer> m_qtVideoLayer;
     MediaPlayer::Preload m_preload;
     bool m_startedPlaying;
@@ -212,11 +208,11 @@ private:
     bool m_videoFrameHasDrawn;
     bool m_isAllowedToRender;
     bool m_privateBrowsing;
-    mutable float m_maxTimeLoadedAtLastDidLoadingProgress;
+    mutable MediaTime m_maxTimeLoadedAtLastDidLoadingProgress;
 #if DRAW_FRAME_RATE
     int  m_frameCountWhilePlaying;
-    double m_timeStartedPlaying;
-    double m_timeStoppedPlaying;
+    MediaTime m_timeStartedPlaying;
+    MediaTime m_timeStoppedPlaying;
 #endif
     mutable FloatSize m_cachedNaturalSize;
 };
index d180801..255835b 100644 (file)
@@ -38,6 +38,7 @@
 #import "URL.h"
 #import "Logging.h"
 #import "MIMETypeRegistry.h"
+#import "MediaTimeQTKit.h"
 #import "PlatformLayer.h"
 #import "PlatformTimeRanges.h"
 #import "SecurityOrigin.h"
@@ -96,6 +97,12 @@ SOFT_LINK_POINTER(QTKit, QTMovieApertureModeAttribute, NSString *)
 SOFT_LINK_POINTER_OPTIONAL(QTKit, QTSecurityPolicyNoLocalToRemoteSiteAttribute, NSString *)
 SOFT_LINK_POINTER_OPTIONAL(QTKit, QTSecurityPolicyNoRemoteToLocalSiteAttribute, NSString *)
 
+@interface QTMovie(WebKitExtras)
+- (QTTime)maxTimeLoaded;
+- (NSArray *)availableRanges;
+- (NSArray *)loadedRanges;
+@end
+
 #define QTMovie getQTMovieClass()
 #define QTMovieView getQTMovieViewClass()
 #define QTMovieLayer getQTMovieLayerClass()
@@ -191,7 +198,7 @@ void MediaPlayerPrivateQTKit::registerMediaEngine(MediaEngineRegistrar registrar
 MediaPlayerPrivateQTKit::MediaPlayerPrivateQTKit(MediaPlayer* player)
     : m_player(player)
     , m_objcObserver(adoptNS([[WebCoreMovieObserver alloc] initWithCallback:this]))
-    , m_seekTo(-1)
+    , m_seekTo(MediaTime::invalidTime())
     , m_seekTimer(this, &MediaPlayerPrivateQTKit::seekTimerFired)
     , m_networkState(MediaPlayer::Empty)
     , m_readyState(MediaPlayer::HaveNothing)
@@ -199,9 +206,9 @@ MediaPlayerPrivateQTKit::MediaPlayerPrivateQTKit(MediaPlayer* player)
     , m_scaleFactor(1, 1)
     , m_enabledTrackCount(0)
     , m_totalTrackCount(0)
-    , m_reportedDuration(-1)
-    , m_cachedDuration(-1)
-    , m_timeToRestore(-1)
+    , m_reportedDuration(MediaTime::invalidTime())
+    , m_cachedDuration(MediaTime::invalidTime())
+    , m_timeToRestore(MediaTime::invalidTime())
     , m_preload(MediaPlayer::Auto)
     , m_startedPlaying(false)
     , m_isStreaming(false)
@@ -210,7 +217,6 @@ MediaPlayerPrivateQTKit::MediaPlayerPrivateQTKit(MediaPlayer* player)
     , m_videoFrameHasDrawn(false)
     , m_isAllowedToRender(false)
     , m_privateBrowsing(false)
-    , m_maxTimeLoadedAtLastDidLoadingProgress(0)
 #if DRAW_FRAME_RATE
     , m_frameCountWhilePlaying(0)
     , m_timeStartedPlaying(0)
@@ -610,14 +616,6 @@ bool MediaPlayerPrivateQTKit::hasSetUpVideoRendering() const
         || m_qtVideoRenderer;
 }
 
-QTTime MediaPlayerPrivateQTKit::createQTTime(float time) const
-{
-    if (!metaDataAvailable())
-        return QTMakeTime(0, 600);
-    long timeScale = [[m_qtMovie.get() attributeForKey:QTMovieTimeScaleAttribute] longValue];
-    return QTMakeTime(lroundf(time * timeScale), timeScale);
-}
-
 void MediaPlayerPrivateQTKit::resumeLoad()
 {
     if (!m_movieURL.isNull())
@@ -713,31 +711,32 @@ void MediaPlayerPrivateQTKit::pause()
     [m_objcObserver.get() setDelayCallbacks:NO];
 }
 
-float MediaPlayerPrivateQTKit::duration() const
+MediaTime MediaPlayerPrivateQTKit::durationMediaTime() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
 
-    if (m_cachedDuration != MediaPlayer::invalidTime())
+    if (m_cachedDuration.isValid())
         return m_cachedDuration;
 
     QTTime time = [m_qtMovie.get() duration];
     if (time.flags == kQTTimeIsIndefinite)
-        return std::numeric_limits<float>::infinity();
-    return static_cast<float>(time.timeValue) / time.timeScale;
+        return MediaTime::positiveInfiniteTime();
+    return toMediaTime(time);
 }
 
-float MediaPlayerPrivateQTKit::currentTime() const
+MediaTime MediaPlayerPrivateQTKit::currentMediaTime() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
     QTTime time = [m_qtMovie.get() currentTime];
-    return static_cast<float>(time.timeValue) / time.timeScale;
+    return toMediaTime(time);
 }
 
-void MediaPlayerPrivateQTKit::seek(float time)
+void MediaPlayerPrivateQTKit::seek(const MediaTime& inTime)
 {
-    LOG(Media, "MediaPlayerPrivateQTKit::seek(%p) - time %f", this, time);
+    MediaTime time = inTime;
+    LOG(Media, "MediaPlayerPrivateQTKit::seek(%p) - time %s", this, toString(time).utf8().data());
     // Nothing to do if we are already in the middle of a seek to the same time.
     if (time == m_seekTo)
         return;
@@ -747,11 +746,11 @@ void MediaPlayerPrivateQTKit::seek(float time)
     if (!metaDataAvailable())
         return;
     
-    if (time > duration())
-        time = duration();
+    if (time > durationMediaTime())
+        time = durationMediaTime();
 
     m_seekTo = time;
-    if (maxTimeSeekable() >= m_seekTo)
+    if (maxMediaTimeSeekable() >= m_seekTo)
         doSeek();
     else 
         m_seekTimer.start(0, 0.5f);
@@ -759,7 +758,7 @@ void MediaPlayerPrivateQTKit::seek(float time)
 
 void MediaPlayerPrivateQTKit::doSeek() 
 {
-    QTTime qttime = createQTTime(m_seekTo);
+    QTTime qttime = toQTTime(m_seekTo);
     // setCurrentTime generates several event callbacks, update afterwards
     [m_objcObserver.get() setDelayCallbacks:YES];
     float oldRate = [m_qtMovie.get() rate];
@@ -769,8 +768,8 @@ void MediaPlayerPrivateQTKit::doSeek()
     [m_qtMovie.get() setCurrentTime:qttime];
 
     // restore playback only if not at end, otherwise QTMovie will loop
-    float timeAfterSeek = currentTime();
-    if (oldRate && timeAfterSeek < duration())
+    MediaTime timeAfterSeek = currentMediaTime();
+    if (oldRate && timeAfterSeek < durationMediaTime())
         [m_qtMovie.get() setRate:oldRate];
 
     cancelSeek();
@@ -780,20 +779,20 @@ void MediaPlayerPrivateQTKit::doSeek()
 void MediaPlayerPrivateQTKit::cancelSeek()
 {
     LOG(Media, "MediaPlayerPrivateQTKit::cancelSeek(%p)", this);
-    m_seekTo = -1;
+    m_seekTo = MediaTime::invalidTime();
     m_seekTimer.stop();
 }
 
 void MediaPlayerPrivateQTKit::seekTimerFired(Timer<MediaPlayerPrivateQTKit>&)
 {        
-    if (!metaDataAvailable()|| !seeking() || currentTime() == m_seekTo) {
+    if (!metaDataAvailable() || !seeking() || currentMediaTime() == m_seekTo) {
         cancelSeek();
         updateStates();
         m_player->timeChanged(); 
         return;
     } 
 
-    if (maxTimeSeekable() >= m_seekTo)
+    if (maxMediaTimeSeekable() >= m_seekTo)
         doSeek();
     else {
         MediaPlayer::NetworkState state = networkState();
@@ -816,7 +815,7 @@ bool MediaPlayerPrivateQTKit::seeking() const
 {
     if (!metaDataAvailable())
         return false;
-    return m_seekTo >= 0;
+    return m_seekTo.isValid() && m_seekTo >= MediaTime::zeroTime();
 }
 
 IntSize MediaPlayerPrivateQTKit::naturalSize() const
@@ -912,7 +911,7 @@ void MediaPlayerPrivateQTKit::setPreservesPitch(bool preservesPitch)
     RetainPtr<NSDictionary> movieAttributes = adoptNS([[m_qtMovie.get() movieAttributes] mutableCopy]);
     ASSERT(movieAttributes);
     [movieAttributes.get() setValue:[NSNumber numberWithBool:preservesPitch] forKey:QTMovieRateChangesPreservePitchAttribute];
-    m_timeToRestore = currentTime();
+    m_timeToRestore = currentMediaTime();
 
     createQTMovie([movieAttributes.get() valueForKey:QTMovieURLAttribute], movieAttributes.get());
 }
@@ -920,36 +919,60 @@ void MediaPlayerPrivateQTKit::setPreservesPitch(bool preservesPitch)
 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateQTKit::buffered() const
 {
     auto timeRanges = PlatformTimeRanges::create();
-    float loaded = maxTimeLoaded();
-    if (loaded > 0)
-        timeRanges->add(MediaTime::zeroTime(), MediaTime::createWithDouble(loaded));
+    MediaTime loaded = maxMediaTimeLoaded();
+    if (loaded > MediaTime::zeroTime())
+        timeRanges->add(MediaTime::zeroTime(), loaded);
     return timeRanges;
 }
 
-float MediaPlayerPrivateQTKit::maxTimeSeekable() const
+static MediaTime maxValueForTimeRanges(NSArray *ranges)
+{
+    if (!ranges)
+        return MediaTime::zeroTime();
+
+    MediaTime max;
+    for (NSValue *value in ranges) {
+        QTTimeRange range = [value QTTimeRangeValue];
+        if (!range.time.timeScale || !range.duration.timeScale)
+            continue;
+
+        MediaTime time = toMediaTime(range.time);
+        MediaTime duration = toMediaTime(range.duration);
+        if (time.isValid() && duration.isValid())
+            max = std::max(max, time + duration);
+    }
+
+    return max;
+}
+
+MediaTime MediaPlayerPrivateQTKit::maxMediaTimeSeekable() const
 {
     if (!metaDataAvailable())
-        return 0;
+        return MediaTime::zeroTime();
 
     // infinite duration means live stream
-    if (std::isinf(duration()))
-        return 0;
+    if (durationMediaTime().isPositiveInfinite())
+        return MediaTime::zeroTime();
+
+    NSArray* seekableRanges = [m_qtMovie availableRanges];
 
-    return wkQTMovieMaxTimeSeekable(m_qtMovie.get());
+    return maxValueForTimeRanges(seekableRanges);
 }
 
-float MediaPlayerPrivateQTKit::maxTimeLoaded() const
+MediaTime MediaPlayerPrivateQTKit::maxMediaTimeLoaded() const
 {
     if (!metaDataAvailable())
-        return 0;
-    return wkQTMovieMaxTimeLoaded(m_qtMovie.get()); 
+        return MediaTime::zeroTime();
+    if ([m_qtMovie respondsToSelector:@selector(loadedRanges)])
+        return maxValueForTimeRanges([m_qtMovie loadedRanges]);
+    return toMediaTime([m_qtMovie maxTimeLoaded]);
 }
 
 bool MediaPlayerPrivateQTKit::didLoadingProgress() const
 {
     if (!duration() || !totalBytes())
         return false;
-    float currentMaxTimeLoaded = maxTimeLoaded();
+    MediaTime currentMaxTimeLoaded = maxMediaTimeLoaded();
     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
     return didLoadingProgress;
@@ -1049,9 +1072,9 @@ void MediaPlayerPrivateQTKit::updateStates()
     }
     
     // If this movie is reloading and we mean to restore the current time/rate, this might be the right time to do it.
-    if (loadState >= QTMovieLoadStateLoaded && oldNetworkState < MediaPlayer::Loaded && m_timeToRestore != MediaPlayer::invalidTime()) {
-        QTTime qttime = createQTTime(m_timeToRestore);
-        m_timeToRestore = MediaPlayer::invalidTime();
+    if (loadState >= QTMovieLoadStateLoaded && oldNetworkState < MediaPlayer::Loaded && m_timeToRestore.isValid()) {
+        QTTime qttime = toQTTime(m_timeToRestore);
+        m_timeToRestore = MediaTime::invalidTime();
             
         // Disable event callbacks from setCurrentTime for restoring time in a recreated video
         [m_objcObserver.get() setDelayCallbacks:YES];
@@ -1065,7 +1088,7 @@ void MediaPlayerPrivateQTKit::updateStates()
     // Note: QT indicates that we are fully loaded with QTMovieLoadStateComplete.
     // However newer versions of QT do not, so we check maxTimeLoaded against duration.
     if (!completelyLoaded && !m_isStreaming && metaDataAvailable())
-        completelyLoaded = maxTimeLoaded() == duration();
+        completelyLoaded = maxMediaTimeLoaded() == durationMediaTime();
 
     if (completelyLoaded) {
         // "Loaded" is reserved for fully buffered movies, never the case when streaming
@@ -1076,7 +1099,7 @@ void MediaPlayerPrivateQTKit::updateStates()
         m_networkState = MediaPlayer::Loading;
     } else if (loadState >= QTMovieLoadStatePlayable) {
         // FIXME: This might not work correctly in streaming case, <rdar://problem/5693967>
-        m_readyState = currentTime() < maxTimeLoaded() ? MediaPlayer::HaveFutureData : MediaPlayer::HaveCurrentData;
+        m_readyState = currentMediaTime() < maxMediaTimeLoaded() ? MediaPlayer::HaveFutureData : MediaPlayer::HaveCurrentData;
         m_networkState = MediaPlayer::Loading;
     } else if (loadState >= QTMovieLoadStateLoaded) {
         m_readyState = MediaPlayer::HaveMetadata;
@@ -1094,7 +1117,7 @@ void MediaPlayerPrivateQTKit::updateStates()
             return;
         }
 
-        float loaded = maxTimeLoaded();
+        MediaTime loaded = maxMediaTimeLoaded();
         if (!loaded)
             m_readyState = MediaPlayer::HaveNothing;
 
@@ -1102,7 +1125,7 @@ void MediaPlayerPrivateQTKit::updateStates()
             m_networkState = MediaPlayer::FormatError;
         else {
             // FIXME: We should differentiate between load/network errors and decode errors <rdar://problem/5605692>
-            if (loaded > 0)
+            if (loaded > MediaTime::zeroTime())
                 m_networkState = MediaPlayer::DecodeError;
             else
                 m_readyState = MediaPlayer::HaveNothing;
@@ -1126,9 +1149,9 @@ void MediaPlayerPrivateQTKit::updateStates()
         m_player->readyStateChanged();
 
     if (loadState >= QTMovieLoadStateLoaded) {
-        float dur = duration();
+        MediaTime dur = durationMediaTime();
         if (dur != m_reportedDuration) {
-            if (m_reportedDuration != MediaPlayer::invalidTime())
+            if (m_reportedDuration.isValid())
                 m_player->durationChanged();
             m_reportedDuration = dur;
         }
@@ -1179,10 +1202,10 @@ void MediaPlayerPrivateQTKit::timeChanged()
     // It may not be possible to seek to a specific time in a streamed movie. When seeking in a 
     // stream QuickTime sets the movie time to closest time possible and posts a timechanged 
     // notification. Update m_seekTo so we can detect when the seek completes.
-    if (m_seekTo != -1)
-        m_seekTo = currentTime();
+    if (m_seekTo.isValid())
+        m_seekTo = currentMediaTime();
 
-    m_timeToRestore = MediaPlayer::invalidTime();
+    m_timeToRestore = MediaTime::invalidTime();
     updateStates();
     m_player->timeChanged();
 }
@@ -1202,8 +1225,8 @@ void MediaPlayerPrivateQTKit::didEnd()
     // are at the end. Do this because QuickTime sometimes reports one time for duration and stops
     // playback at another time, which causes problems in HTMLMediaElement. QTKit's 'ended' event 
     // fires when playing in reverse so don't update duration when at time zero!
-    float now = currentTime();
-    if (now > 0)
+    MediaTime now = currentMediaTime();
+    if (now > MediaTime::zeroTime())
         m_cachedDuration = now;
 
     updateStates();
@@ -1647,15 +1670,6 @@ void MediaPlayerPrivateQTKit::setPreload(MediaPlayer::Preload preload)
         [m_qtMovie.get() setAttribute:[NSNumber numberWithBool:NO] forKey:@"QTMovieLimitReadAheadAttribute"];
 }
 
-float MediaPlayerPrivateQTKit::mediaTimeForTimeValue(float timeValue) const
-{
-    if (!metaDataAvailable())
-        return timeValue;
-
-    QTTime qttime = createQTTime(timeValue);
-    return static_cast<float>(qttime.timeValue) / qttime.timeScale;
-}
-
 void MediaPlayerPrivateQTKit::setPrivateBrowsingMode(bool privateBrowsing)
 {
     m_privateBrowsing = privateBrowsing;
diff --git a/Source/WebCore/platform/graphics/mac/MediaTimeQTKit.h b/Source/WebCore/platform/graphics/mac/MediaTimeQTKit.h
new file mode 100644 (file)
index 0000000..ada8ebd
--- /dev/null
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MediaTimeQTKit_h
+#define MediaTimeQTKit_h
+
+#if PLATFORM(MAC)
+
+#include <QTKit/QTTime.h>
+#include <wtf/MediaTime.h>
+
+namespace WebCore {
+
+QTTime toQTTime(const MediaTime&);
+MediaTime toMediaTime(const QTTime&);
+    
+}
+
+#endif
+
+#endif // MediaTimeQTKit_h
diff --git a/Source/WebCore/platform/graphics/mac/MediaTimeQTKit.mm b/Source/WebCore/platform/graphics/mac/MediaTimeQTKit.mm
new file mode 100644 (file)
index 0000000..9415aa4
--- /dev/null
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "MediaTimeQTKit.h"
+
+#if PLATFORM(MAC)
+
+#import "SoftLinking.h"
+#import <QTKit/QTTime.h>
+
+SOFT_LINK_FRAMEWORK(QTKit);
+SOFT_LINK_CONSTANT(QTKit, QTIndefiniteTime, QTTime);
+SOFT_LINK_CONSTANT(QTKit, QTZeroTime, QTTime);
+SOFT_LINK(QTKit, QTTimeCompare, NSComparisonResult, (QTTime time, QTTime otherTime), (time, otherTime));
+SOFT_LINK(QTKit, QTMakeTime, QTTime, (long long timeValue, long timeScale), (timeValue, timeScale));
+
+namespace WebCore {
+
+MediaTime toMediaTime(const QTTime& qtTime)
+{
+    if (qtTime.flags & kQTTimeIsIndefinite)
+        return MediaTime::indefiniteTime();
+    return qtTime.timeScale ? MediaTime(qtTime.timeValue, qtTime.timeScale) : MediaTime::invalidTime();
+}
+
+QTTime toQTTime(const MediaTime& mediaTime)
+{
+    if (mediaTime.isIndefinite() || mediaTime.isInvalid())
+        return getQTIndefiniteTime();
+    if (!mediaTime)
+        return getQTZeroTime();
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+    return QTMakeTime(mediaTime.timeValue(), mediaTime.timeScale());
+#pragma clang diagnostic pop
+}
+
+}
+
+#endif
index d7ac8ff..1b53698 100644 (file)
@@ -29,7 +29,7 @@
 
 #import "PlatformClockCM.h"
 
-#import "MediaTimeMac.h"
+#import "MediaTimeAVFoundation.h"
 #import "SoftLinking.h"
 #if PLATFORM(IOS)
 #import <CoreMedia/CMAudioClock.h>
index 6f51c24..c66178b 100644 (file)
@@ -159,9 +159,9 @@ MediaPlayer::ReadyState MockMediaPlayerMediaSource::readyState() const
     return m_readyState;
 }
 
-double MockMediaPlayerMediaSource::maxTimeSeekableDouble() const
+MediaTime MockMediaPlayerMediaSource::maxMediaTimeSeekable() const
 {
-    return m_duration.toDouble();
+    return m_duration;
 }
 
 std::unique_ptr<PlatformTimeRanges> MockMediaPlayerMediaSource::buffered() const
@@ -185,23 +185,23 @@ void MockMediaPlayerMediaSource::paint(GraphicsContext*, const IntRect&)
 {
 }
 
-double MockMediaPlayerMediaSource::currentTimeDouble() const
+MediaTime MockMediaPlayerMediaSource::currentMediaTime() const
 {
-    return m_currentTime.toDouble();
+    return m_currentTime;
 }
 
-double MockMediaPlayerMediaSource::durationDouble() const
+MediaTime MockMediaPlayerMediaSource::durationMediaTime() const
 {
-    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : 0;
+    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : MediaTime::zeroTime();
 }
 
-void MockMediaPlayerMediaSource::seekWithTolerance(double time, double negativeTolerance, double positiveTolerance)
+void MockMediaPlayerMediaSource::seekWithTolerance(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
 {
     if (!negativeTolerance && !positiveTolerance) {
-        m_currentTime = MediaTime::createWithDouble(time);
-        m_mediaSourcePrivate->seekToTime(MediaTime::createWithDouble(time));
+        m_currentTime = time;
+        m_mediaSourcePrivate->seekToTime(time);
     } else
-        m_currentTime = m_mediaSourcePrivate->seekToTime(MediaTime::createWithDouble(time), MediaTime::createWithDouble(negativeTolerance), MediaTime::createWithDouble(positiveTolerance));
+        m_currentTime = m_mediaSourcePrivate->seekToTime(time, negativeTolerance, positiveTolerance);
 
     if (m_seekCompleted) {
         m_player->timeChanged();
@@ -285,9 +285,9 @@ unsigned long MockMediaPlayerMediaSource::corruptedVideoFrames()
     return m_mediaSourcePrivate ? m_mediaSourcePrivate->corruptedVideoFrames() : 0;
 }
 
-double MockMediaPlayerMediaSource::totalFrameDelay()
+MediaTime MockMediaPlayerMediaSource::totalFrameDelay()
 {
-    return m_mediaSourcePrivate ? m_mediaSourcePrivate->totalFrameDelay() : 0;
+    return m_mediaSourcePrivate ? m_mediaSourcePrivate->totalFrameDelay() : MediaTime::zeroTime();
 }
 
 }
index 5a6da9d..b6694d8 100644 (file)
@@ -71,18 +71,18 @@ private:
     virtual bool seeking() const override;
     virtual bool paused() const override;
     virtual MediaPlayer::NetworkState networkState() const override;
-    virtual double maxTimeSeekableDouble() const override;
+    virtual MediaTime maxMediaTimeSeekable() const override;
     virtual std::unique_ptr<PlatformTimeRanges> buffered() const override;
     virtual bool didLoadingProgress() const override;
     virtual void setSize(const IntSize&) override;
     virtual void paint(GraphicsContext*, const IntRect&) override;
-    virtual double currentTimeDouble() const override;
-    virtual double durationDouble() const override;
-    virtual void seekWithTolerance(double time, double, double) override;
+    virtual MediaTime currentMediaTime() const override;
+    virtual MediaTime durationMediaTime() const override;
+    virtual void seekWithTolerance(const MediaTime&, const MediaTime&, const MediaTime&) override;
     virtual unsigned long totalVideoFrames() override;
     virtual unsigned long droppedVideoFrames() override;
     virtual unsigned long corruptedVideoFrames() override;
-    virtual double totalFrameDelay() override;
+    virtual MediaTime totalFrameDelay() override;
 
     MediaPlayer* m_player;
     RefPtr<MockMediaSourcePrivate> m_mediaSourcePrivate;
index fb49d4e..45aecfa 100644 (file)
@@ -50,7 +50,6 @@ MockMediaSourcePrivate::MockMediaSourcePrivate(MockMediaPlayerMediaSource* paren
     , m_totalVideoFrames(0)
     , m_droppedVideoFrames(0)
     , m_corruptedVideoFrames(0)
-    , m_totalFrameDelay(0)
 {
 }
 
@@ -87,7 +86,7 @@ void MockMediaSourcePrivate::removeSourceBuffer(SourceBufferPrivate* buffer)
     m_sourceBuffers.remove(pos);
 }
 
-double MockMediaSourcePrivate::duration()
+MediaTime MockMediaSourcePrivate::duration()
 {
     return m_client->duration();
 }
@@ -99,7 +98,7 @@ std::unique_ptr<PlatformTimeRanges> MockMediaSourcePrivate::buffered()
 
 void MockMediaSourcePrivate::durationChanged()
 {
-    m_player->updateDuration(MediaTime::createWithDouble(duration()));
+    m_player->updateDuration(duration());
 }
 
 void MockMediaSourcePrivate::markEndOfStream(EndOfStreamStatus status)
index 0b35a10..4b1ee25 100644 (file)
@@ -47,7 +47,7 @@ public:
     bool hasAudio() const;
     bool hasVideo() const;
 
-    double duration();
+    MediaTime duration();
     std::unique_ptr<PlatformTimeRanges> buffered();
 
     MockMediaPlayerMediaSource* player() const { return m_player; }
@@ -58,12 +58,12 @@ public:
     unsigned long totalVideoFrames() const { return m_totalVideoFrames; }
     unsigned long droppedVideoFrames() const  { return m_droppedVideoFrames; }
     unsigned long corruptedVideoFrames() const { return m_corruptedVideoFrames; }
-    double totalFrameDelay() const { return m_totalFrameDelay; }
+    MediaTime totalFrameDelay() const { return m_totalFrameDelay; }
 
     void incrementTotalVideoFrames() { ++m_totalVideoFrames; }
     void incrementDroppedFrames() { ++m_droppedVideoFrames; }
     void incrementCorruptedFrames() { ++m_corruptedVideoFrames; }
-    void incrementTotalFrameDelayBy(double delay) { m_totalFrameDelay += delay; }
+    void incrementTotalFrameDelayBy(const MediaTime& delay) { m_totalFrameDelay += delay; }
 
 private:
     MockMediaSourcePrivate(MockMediaPlayerMediaSource*, MediaSourcePrivateClient*);
@@ -92,7 +92,7 @@ private:
     unsigned long m_totalVideoFrames;
     unsigned long m_droppedVideoFrames;
     unsigned long m_corruptedVideoFrames;
-    double m_totalFrameDelay;
+    MediaTime m_totalFrameDelay;
 };
 
 }
index a22fa28..5486ffa 100644 (file)
@@ -238,7 +238,7 @@ void MockSourceBufferPrivate::enqueueSample(PassRefPtr<MediaSample> sample, Atom
     if (box->isDropped())
         m_mediaSource->incrementDroppedFrames();
     if (box->isDelayed())
-        m_mediaSource->incrementTotalFrameDelayBy(1);
+        m_mediaSource->incrementTotalFrameDelayBy(MediaTime(1, 1));
 }
 
 bool MockSourceBufferPrivate::hasVideo() const
index 4f61036..e775919 100644 (file)
@@ -324,8 +324,8 @@ void TiledCoreAnimationDrawingArea::suspendPainting()
     ASSERT(!m_isPaintingSuspended);
     m_isPaintingSuspended = true;
 
-    [m_hostingLayer setValue:@YES forKey:@"NSCAViewRenderPaused"];
-    [[NSNotificationCenter defaultCenter] postNotificationName:@"NSCAViewRenderDidPauseNotification" object:nil userInfo:[NSDictionary dictionaryWithObject:m_hostingLayer.get() forKey:@"layer"]];
+    [m_hostingLayer setValue:@YES forKey:@"NSCAViewRenderPaused"];
+    [[NSNotificationCenter defaultCenter] postNotificationName:@"NSCAViewRenderDidPauseNotification" object:nil userInfo:[NSDictionary dictionaryWithObject:m_hostingLayer.get() forKey:@"layer"]];
 }
 
 void TiledCoreAnimationDrawingArea::resumePainting()
@@ -337,8 +337,8 @@ void TiledCoreAnimationDrawingArea::resumePainting()
     }
     m_isPaintingSuspended = false;
 
-    [m_hostingLayer setValue:@NO forKey:@"NSCAViewRenderPaused"];
-    [[NSNotificationCenter defaultCenter] postNotificationName:@"NSCAViewRenderDidResumeNotification" object:nil userInfo:[NSDictionary dictionaryWithObject:m_hostingLayer.get() forKey:@"layer"]];
+    [m_hostingLayer setValue:@NO forKey:@"NSCAViewRenderPaused"];
+    [[NSNotificationCenter defaultCenter] postNotificationName:@"NSCAViewRenderDidResumeNotification" object:nil userInfo:[NSDictionary dictionaryWithObject:m_hostingLayer.get() forKey:@"layer"]];
 }
 
 void TiledCoreAnimationDrawingArea::setExposedRect(const FloatRect& exposedRect)