diff --git a/LayoutTests/media/media-source/media-source-monitor-playing-event-expected.txt b/LayoutTests/media/media-source/media-source-monitor-playing-event-expected.txt
index 1282ffafa3ee6..f366f20e12eb7 100644
--- a/LayoutTests/media/media-source/media-source-monitor-playing-event-expected.txt
+++ b/LayoutTests/media/media-source/media-source-monitor-playing-event-expected.txt
@@ -16,7 +16,7 @@ EVENT(canplay)
 EVENT(updateend)
 EVENT(canplaythrough)
 EVENT(playing)
-video.readyState : HAVE_ENOUGH_DATA
+EXPECTED (video.readyState >= readyStateString.indexOf("HAVE_CURRENT_DATA") == 'true') OK
 RUN(sourceBuffer.remove(0,10))
 EVENT(updateend)
 EVENT(waiting)
diff --git a/LayoutTests/media/media-source/media-source-monitor-playing-event.html b/LayoutTests/media/media-source/media-source-monitor-playing-event.html
index 1917ae0d37693..2408ddb0b483b 100644
--- a/LayoutTests/media/media-source/media-source-monitor-playing-event.html
+++ b/LayoutTests/media/media-source/media-source-monitor-playing-event.html
@@ -11,7 +11,6 @@
     var sample;
     var handleVideoEvents = [
         "loadstart",
-        "waiting",
         "loadedmetadata",
         "loadeddata",
         "canplay",
@@ -62,10 +61,17 @@
         run('sourceBuffer.appendBuffer(sample)');
         await Promise.all([waitFor(mediaElement, 'playing'), waitFor(sourceBuffer, 'updateend')]);
 
-        consoleWrite('video.readyState : ' + readyStateString[video.readyState]);
+        // As per the MockMediaPlayerMediaSource implementation, currentTime=10 (the maximum playable time) at
+        // this point, right after playback has started (at least in WebKitGTK), so we no longer have
+        // HAVE_ENOUGH_DATA. We have HAVE_CURRENT_DATA instead. We can't test for HAVE_ENOUGH_DATA and for
+        // canplaythrough at the same time in a single test with the current MockMediaPlayerMediaSource
+        // implementation. However, we get HAVE_ENOUGH_DATA on some Apple implementations. To avoid problems,
+        // let's just check for >= HAVE_CURRENT_DATA.
+        testExpected('video.readyState >= readyStateString.indexOf("HAVE_CURRENT_DATA")', true);
 
         // This remove changes ready state to HAVE_METADATA.
         run('sourceBuffer.remove(0,10)');
-        await waitFor(sourceBuffer, 'updateend');
+        // Waiting is time-dependent and can happen more than once. We're only interested in at least one occurrence.
+        await Promise.all([waitFor(mediaElement, 'waiting'), waitFor(sourceBuffer, 'updateend')]);
 
         await sleepFor(1000);
 
@@ -74,8 +80,9 @@
         run('sourceBuffer.appendBuffer(sample)');
         await waitFor(sourceBuffer, 'updateend');
 
+        // Append at least 3s more than currentTime (10) to create an additional playable range that can trigger canPlayThrough.
         consoleWrite('video.readyState : ' + readyStateString[video.readyState]);
-        sample = makeASample(1, 1, 9, 1, 1, SAMPLE_FLAG.SYNC, 1);
+        sample = makeASample(1, 1, 12, 1, 1, SAMPLE_FLAG.SYNC, 1);
         // This append changes the ready state to HAVE_ENOUGH_DATA and fires the playing event.
         run('sourceBuffer.appendBuffer(sample)');
         await Promise.all([waitFor(mediaElement, 'playing'), waitFor(sourceBuffer, 'updateend')]);
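Note on the relaxed check above: the HTML media ready states form an ordered scale, so a single >= comparison accepts both the HAVE_CURRENT_DATA reported right after playback starts with the WebKitGTK mock player and the HAVE_ENOUGH_DATA reported by some Apple ports. A minimal sketch of that ordering follows; the enum values are the standard HTMLMediaElement constants, while the helper name is made up for illustration and is not part of the patch.

// Illustration only: the numeric readyState scale defined by HTML (not WebKit code).
#include <cstdio>

enum ReadyState { HAVE_NOTHING = 0, HAVE_METADATA = 1, HAVE_CURRENT_DATA = 2, HAVE_FUTURE_DATA = 3, HAVE_ENOUGH_DATA = 4 };

// The test's relaxed assertion: anything at or above HAVE_CURRENT_DATA passes,
// so both the WebKitGTK mock result and the Apple-port result are accepted.
static bool passesRelaxedCheck(ReadyState state)
{
    return state >= HAVE_CURRENT_DATA;
}

int main()
{
    std::printf("GTK mock (HAVE_CURRENT_DATA): %d\n", passesRelaxedCheck(HAVE_CURRENT_DATA)); // 1
    std::printf("Apple ports (HAVE_ENOUGH_DATA): %d\n", passesRelaxedCheck(HAVE_ENOUGH_DATA)); // 1
    std::printf("After remove (HAVE_METADATA): %d\n", passesRelaxedCheck(HAVE_METADATA));      // 0
    return 0;
}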
diff --git a/Source/WebCore/Modules/mediasource/SourceBuffer.cpp b/Source/WebCore/Modules/mediasource/SourceBuffer.cpp
index 1be76f6207232..685940723e7c7 100644
--- a/Source/WebCore/Modules/mediasource/SourceBuffer.cpp
+++ b/Source/WebCore/Modules/mediasource/SourceBuffer.cpp
@@ -87,7 +87,6 @@ SourceBuffer::SourceBuffer(Ref<SourceBufferPrivate>&& sourceBufferPrivate, Media
     , m_appendWindowStart(MediaTime::zeroTime())
     , m_appendWindowEnd(MediaTime::positiveInfiniteTime())
     , m_appendState(WaitingForSegment)
-    , m_timeOfBufferingMonitor(MonotonicTime::fromRawSeconds(0))
     , m_pendingRemoveStart(MediaTime::invalidTime())
     , m_pendingRemoveEnd(MediaTime::invalidTime())
     , m_removeTimer(*this, &SourceBuffer::removeTimerFired)
@@ -227,7 +226,6 @@ ExceptionOr<void> SourceBuffer::setAppendWindowEnd(double newValue)
 
 ExceptionOr<void> SourceBuffer::appendBuffer(const BufferSource& data)
 {
-    monitorBufferingRate();
     return appendBufferInternal(static_cast<const uint8_t*>(data.data()), data.length());
 }
 
@@ -593,7 +591,6 @@ void SourceBuffer::sourceBufferPrivateAppendComplete(AppendResult result)
     scheduleEvent(eventNames().updateendEvent);
 
     m_source->monitorSourceBuffers();
-    monitorBufferingRate();
     m_private->reenqueueMediaIfNeeded(m_source->currentTime());
 
     DEBUG_LOG(LOGIDENTIFIER, "buffered = ", m_private->buffered()->ranges());
@@ -1174,11 +1171,6 @@ void SourceBuffer::textTrackLanguageChanged(TextTrack& track)
     m_textTracks->scheduleChangeEvent();
 }
 
-void SourceBuffer::sourceBufferPrivateDidParseSample(double frameDuration)
-{
-    m_bufferedSinceLastMonitor += frameDuration;
-}
-
 void SourceBuffer::sourceBufferPrivateDurationChanged(const MediaTime& duration, CompletionHandler<void()>&& completionHandler)
 {
     if (isRemoved()) {
@@ -1209,51 +1201,28 @@ void SourceBuffer::sourceBufferPrivateStreamEndedWithDecodeError()
     m_source->streamEndedWithError(MediaSource::EndOfStreamError::Decode);
 }
 
-void SourceBuffer::monitorBufferingRate()
-{
-    // We avoid the first update of m_averageBufferRate on purpose, but in exchange we get a more accurate m_timeOfBufferingMonitor initial time.
-    if (!m_timeOfBufferingMonitor) {
-        m_timeOfBufferingMonitor = MonotonicTime::now();
-        return;
-    }
-
-    MonotonicTime now = MonotonicTime::now();
-    Seconds interval = now - m_timeOfBufferingMonitor;
-    double rateSinceLastMonitor = m_bufferedSinceLastMonitor / interval.seconds();
-
-    m_timeOfBufferingMonitor = now;
-    m_bufferedSinceLastMonitor = 0;
-
-    m_averageBufferRate += (interval.seconds() * ExponentialMovingAverageCoefficient) * (rateSinceLastMonitor - m_averageBufferRate);
-
-    DEBUG_LOG(LOGIDENTIFIER, m_averageBufferRate);
-}
-
 bool SourceBuffer::canPlayThroughRange(const PlatformTimeRanges& ranges)
 {
     if (isRemoved())
         return false;
 
-    monitorBufferingRate();
-
-    // Assuming no fluctuations in the buffering rate, loading 1 second per second or greater
-    // means indefinite playback. This could be improved by taking jitter into account.
-    if (m_averageBufferRate > 1)
-        return true;
-
-    // Add up all the time yet to be buffered.
-    MediaTime currentTime = m_source->currentTime();
     MediaTime duration = m_source->duration();
+    if (!duration.isValid())
+        return false;
 
-    PlatformTimeRanges unbufferedRanges = ranges;
-    unbufferedRanges.invert();
-    unbufferedRanges.intersectWith(PlatformTimeRanges(currentTime, std::max(currentTime, duration)));
-    MediaTime unbufferedTime = unbufferedRanges.totalDuration();
-    if (!unbufferedTime.isValid())
+    MediaTime currentTime = m_source->currentTime();
+    if (duration <= currentTime)
         return true;
 
-    MediaTime timeRemaining = duration - currentTime;
-    return unbufferedTime.toDouble() / m_averageBufferRate < timeRemaining.toDouble();
+    // If we have data up to the MediaSource's duration or 3s ahead, we can
+    // assume that we can play without interruption.
+    MediaTime bufferedEnd = ranges.maximumBufferedTime();
+    // Same tolerance as contiguousFrameTolerance in SourceBufferPrivate::processMediaSample(),
+    // to account for small errors.
+    const MediaTime tolerance = MediaTime(1, 1000);
+    MediaTime timeAhead = std::min(duration, currentTime + MediaTime(3, 1)) - tolerance;
+
+    return bufferedEnd >= timeAhead;
 }
 
 void SourceBuffer::sourceBufferPrivateReportExtraMemoryCost(uint64_t extraMemory)
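For reference, a minimal sketch of the look-ahead rule that replaces the buffering-rate estimate above. It models times as plain seconds rather than WebKit's MediaTime, and the helper name is made up; the 3 s window and the 1 ms tolerance mirror the values in the hunk.

// Simplified model of the new canPlayThroughRange() heuristic (illustration, not WebKit code).
#include <algorithm>
#include <cmath>
#include <cstdio>

// Times are plain seconds; NAN stands in for MediaTime::invalidTime().
static bool canPlayThroughSketch(double currentTime, double duration, double bufferedEnd)
{
    if (std::isnan(duration))
        return false;
    if (duration <= currentTime)
        return true;

    // Playable without interruption if buffered up to the duration or at least
    // 3 s past the playhead, minus a 1 ms tolerance (MediaTime(1, 1000) in the hunk)
    // to absorb rounding at range boundaries.
    const double tolerance = 0.001;
    double timeAhead = std::min(duration, currentTime + 3.0) - tolerance;
    return bufferedEnd >= timeAhead;
}

int main()
{
    // Scenario from the layout test: playhead parked at 10 s.
    std::printf("%d\n", canPlayThroughSketch(10.0, 20.0, 10.0)); // 0: nothing buffered past the playhead
    std::printf("%d\n", canPlayThroughSketch(10.0, 20.0, 13.0)); // 1: buffered at least 3 s ahead
    std::printf("%d\n", canPlayThroughSketch(10.0, 11.0, 11.0)); // 1: buffered up to the duration
    std::printf("%d\n", canPlayThroughSketch(10.0, NAN, 13.0));  // 0: duration still unknown
    return 0;
}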
diff --git a/Source/WebCore/Modules/mediasource/SourceBuffer.h b/Source/WebCore/Modules/mediasource/SourceBuffer.h
index b76e4b2615a66..fd7c678ba00ea 100644
--- a/Source/WebCore/Modules/mediasource/SourceBuffer.h
+++ b/Source/WebCore/Modules/mediasource/SourceBuffer.h
@@ -162,7 +162,6 @@ class SourceBuffer final
     void sourceBufferPrivateAppendComplete(AppendResult) final;
     void sourceBufferPrivateHighestPresentationTimestampChanged(const MediaTime&) final;
     void sourceBufferPrivateDurationChanged(const MediaTime& duration, CompletionHandler<void()>&&) final;
-    void sourceBufferPrivateDidParseSample(double sampleDuration) final;
     void sourceBufferPrivateDidDropSample() final;
     void sourceBufferPrivateBufferedDirtyChanged(bool) final;
     void sourceBufferPrivateDidReceiveRenderingError(int64_t errorCode) final;
@@ -201,8 +200,6 @@ class SourceBuffer final
     uint64_t maximumBufferSize() const;
 
-    void monitorBufferingRate();
-
     void removeTimerFired();
 
     void reportExtraMemoryAllocated(uint64_t extraMemory);
 
@@ -243,9 +240,6 @@ class SourceBuffer final
     enum AppendStateType { WaitingForSegment, ParsingInitSegment, ParsingMediaSegment };
     AppendStateType m_appendState;
 
-    MonotonicTime m_timeOfBufferingMonitor;
-    double m_bufferedSinceLastMonitor { 0 };
-    double m_averageBufferRate { 0 };
 
     bool m_bufferedDirty { true }; // Can only grow.
 
diff --git a/Source/WebCore/platform/graphics/SourceBufferPrivate.cpp b/Source/WebCore/platform/graphics/SourceBufferPrivate.cpp
index e18e296e239e1..e24b4bc0ce202 100644
--- a/Source/WebCore/platform/graphics/SourceBufferPrivate.cpp
+++ b/Source/WebCore/platform/graphics/SourceBufferPrivate.cpp
@@ -961,6 +961,7 @@ void SourceBufferPrivate::didReceiveSample(Ref<MediaSample>&& originalSample)
     // For instance, most WebM files are muxed rounded to the millisecond (the default TimecodeScale of the format)
     // but their durations use a finer timescale (causing a sub-millisecond overlap). More rarely, there are also
     // MP4 files with slightly off tfdt boxes, presenting a similar problem at the beginning of each fragment.
+    // Same as tolerance in SourceBuffer::canPlayThroughRange().
    const MediaTime contiguousFrameTolerance = MediaTime(1, 1000);
 
     // If highest presentation timestamp for track buffer is set and less than or equal to presentation timestamp
@@ -1108,9 +1109,7 @@ void SourceBufferPrivate::didReceiveSample(Ref<MediaSample>&& originalSample)
             presentationEndTime = nearestToPresentationEndTime;
 
         trackBuffer.addBufferedRange(presentationTimestamp, presentationEndTime);
-        m_client->sourceBufferPrivateDidParseSample(frameDuration.toDouble());
         setBufferedDirty(true);
-
         break;
     } while (true);
 
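The call removed just above was the only producer for SourceBuffer's buffering-rate estimate, which the earlier hunks delete; that is what lets the rest of the callback chain below disappear as well. A rough sketch of the exponential moving average that the removed monitorBufferingRate() computed follows, with an assumed smoothing coefficient (0.1), since the patch does not show the value of ExponentialMovingAverageCoefficient.

// Illustration of the removed buffering-rate EMA (not WebKit code).
#include <chrono>
#include <cstdio>

struct BufferingRateMonitor {
    double bufferedSinceLastMonitor { 0 }; // seconds of media parsed since the last update
    double averageBufferRate { 0 };        // seconds of media buffered per wall-clock second
    std::chrono::steady_clock::time_point lastMonitorTime {};
    bool started { false };

    // Fed by the now-removed sourceBufferPrivateDidParseSample() notification.
    void didParseSample(double frameDuration) { bufferedSinceLastMonitor += frameDuration; }

    void monitor()
    {
        auto now = std::chrono::steady_clock::now();
        if (!started) { // the removed code skipped the first update on purpose
            started = true;
            lastMonitorTime = now;
            return;
        }
        double interval = std::chrono::duration<double>(now - lastMonitorTime).count();
        double rateSinceLastMonitor = bufferedSinceLastMonitor / interval;
        lastMonitorTime = now;
        bufferedSinceLastMonitor = 0;

        // Same update rule as the removed code; 0.1 is an assumed stand-in for
        // ExponentialMovingAverageCoefficient, whose value the patch does not show.
        const double coefficient = 0.1;
        averageBufferRate += (interval * coefficient) * (rateSinceLastMonitor - averageBufferRate);
    }
};

int main()
{
    // In a real run these calls would be separated by wall-clock playback time.
    BufferingRateMonitor monitor;
    monitor.monitor();           // first call only records the start time
    monitor.didParseSample(2.0); // pretend 2 s of media were parsed
    monitor.monitor();           // folds the measured rate into the average
    std::printf("average buffer rate: %f s/s\n", monitor.averageBufferRate);
    return 0;
}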
diff --git a/Source/WebCore/platform/graphics/SourceBufferPrivateClient.h b/Source/WebCore/platform/graphics/SourceBufferPrivateClient.h
index b6a6ee1bfc41d..4a47902f0af68 100644
--- a/Source/WebCore/platform/graphics/SourceBufferPrivateClient.h
+++ b/Source/WebCore/platform/graphics/SourceBufferPrivateClient.h
@@ -85,7 +85,6 @@ class SourceBufferPrivateClient : public CanMakeWeakPtr<SourceBufferPrivateClien
     virtual void sourceBufferPrivateDurationChanged(const MediaTime&, CompletionHandler<void()>&&) = 0;
     virtual void sourceBufferPrivateHighestPresentationTimestampChanged(const MediaTime&) = 0;
-    virtual void sourceBufferPrivateDidParseSample(double frameDuration) = 0;
     virtual void sourceBufferPrivateDidDropSample() = 0;
     virtual void sourceBufferPrivateBufferedDirtyChanged(bool) = 0;
     virtual void sourceBufferPrivateDidReceiveRenderingError(int64_t errorCode) = 0;
diff --git a/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.cpp b/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.cpp
index aa3004be45416..bddc19e98bcd0 100644
--- a/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.cpp
+++ b/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.cpp
@@ -147,14 +147,6 @@ void RemoteSourceBufferProxy::sourceBufferPrivateDurationChanged(const MediaTime
     m_connectionToWebProcess->connection().sendWithAsyncReply(Messages::SourceBufferPrivateRemote::SourceBufferPrivateDurationChanged(duration), WTFMove(completionHandler), m_identifier);
 }
 
-void RemoteSourceBufferProxy::sourceBufferPrivateDidParseSample(double sampleDuration)
-{
-    if (!m_connectionToWebProcess)
-        return;
-
-    m_connectionToWebProcess->connection().send(Messages::SourceBufferPrivateRemote::SourceBufferPrivateDidParseSample(sampleDuration), m_identifier);
-}
-
 void RemoteSourceBufferProxy::sourceBufferPrivateDidDropSample()
 {
     if (!m_connectionToWebProcess)
diff --git a/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.h b/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.h
index a9797ba9c7940..89b43300d32b2 100644
--- a/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.h
+++ b/Source/WebKit/GPUProcess/media/RemoteSourceBufferProxy.h
@@ -76,7 +76,6 @@ class RemoteSourceBufferProxy final
     void sourceBufferPrivateAppendComplete(WebCore::SourceBufferPrivateClient::AppendResult) final;
     void sourceBufferPrivateHighestPresentationTimestampChanged(const MediaTime&) final;
     void sourceBufferPrivateDurationChanged(const MediaTime&, CompletionHandler<void()>&&) final;
-    void sourceBufferPrivateDidParseSample(double sampleDuration) final;
     void sourceBufferPrivateDidDropSample() final;
     void sourceBufferPrivateBufferedDirtyChanged(bool) final;
     void sourceBufferPrivateDidReceiveRenderingError(int64_t errorCode) final;
diff --git a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.cpp b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.cpp
index 1c9e9a02e59d6..f3a78742a5112 100644
--- a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.cpp
+++ b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.cpp
@@ -441,12 +441,6 @@ void SourceBufferPrivateRemote::sourceBufferPrivateDurationChanged(const MediaTi
     completionHandler();
 }
 
-void SourceBufferPrivateRemote::sourceBufferPrivateDidParseSample(double sampleDuration)
-{
-    if (m_client)
-        m_client->sourceBufferPrivateDidParseSample(sampleDuration);
-}
-
 void SourceBufferPrivateRemote::sourceBufferPrivateDidDropSample()
 {
     if (m_client)
diff --git a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.h b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.h
index f14e8f01966d8..e7328946e59bb 100644
--- a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.h
+++ b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.h
@@ -114,7 +114,6 @@ class SourceBufferPrivateRemote final
     void sourceBufferPrivateAppendComplete(WebCore::SourceBufferPrivateClient::AppendResult, const WebCore::PlatformTimeRanges& buffered, uint64_t totalTrackBufferSizeInBytes, const MediaTime& timestampOffset);
     void sourceBufferPrivateHighestPresentationTimestampChanged(const MediaTime&);
     void sourceBufferPrivateDurationChanged(const MediaTime&, CompletionHandler<void()>&&);
-    void sourceBufferPrivateDidParseSample(double sampleDuration);
     void sourceBufferPrivateDidDropSample();
     void sourceBufferPrivateDidReceiveRenderingError(int64_t errorCode);
     void sourceBufferPrivateBufferedDirtyChanged(bool dirty);
diff --git a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.messages.in b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.messages.in
index 7d459f763c19e..f80920107847f 100644
--- a/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.messages.in
+++ b/Source/WebKit/WebProcess/GPU/media/SourceBufferPrivateRemote.messages.in
@@ -32,7 +32,6 @@ messages -> SourceBufferPrivateRemote NotRefCounted {
     SourceBufferPrivateAppendComplete(WebCore::SourceBufferPrivateClient::AppendResult appendResult, WebCore::PlatformTimeRanges buffered, uint64_t totalTrackBufferSizeInBytes, MediaTime timeStampOffset)
     SourceBufferPrivateHighestPresentationTimestampChanged(MediaTime timestamp)
     SourceBufferPrivateDurationChanged(MediaTime duration) -> ()
-    SourceBufferPrivateDidParseSample(double sampleDuration)
     SourceBufferPrivateDidDropSample()
     SourceBufferPrivateBufferedDirtyChanged(bool dirty)
     SourceBufferPrivateDidReceiveRenderingError(int64_t errorCode)
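Taken together, the WebKit-side changes delete one notification from the SourceBufferPrivateClient interface along with the GPU-process plumbing that mirrors it. A schematic sketch of that mirroring follows, using hypothetical names rather than the real WebKit classes, to show why the same method has to disappear from the client interface, the proxy, the remote stub, and the .messages.in list at once.

// Schematic of the client/proxy mirroring trimmed by this patch (hypothetical types, not WebKit's).
#include <cstdio>
#include <functional>

// Stands in for WebCore::SourceBufferPrivateClient.
struct ParserClient {
    virtual ~ParserClient() = default;
    virtual void didDropSample() = 0;
    // A per-sample notification such as didParseSample(double) would have to be declared here,
    // forwarded by the proxy below, and listed as an IPC message; deleting it means deleting all three.
};

// Stands in for RemoteSourceBufferProxy: forwards client callbacks over an IPC-like channel,
// which in WebKit is generated from SourceBufferPrivateRemote.messages.in.
struct RemoteProxy final : ParserClient {
    std::function<void(const char*)> sendMessage; // stands in for the IPC connection
    void didDropSample() override { sendMessage("SourceBufferPrivateDidDropSample"); }
};

int main()
{
    RemoteProxy proxy;
    proxy.sendMessage = [](const char* name) { std::printf("IPC -> %s\n", name); };
    ParserClient& client = proxy;
    client.didDropSample(); // prints: IPC -> SourceBufferPrivateDidDropSample
    return 0;
}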