Make Seek test work
Prevent chaining of null GstBuffers and avoid seeks while appends are in progress.
eocanha committed Jun 12, 2015
1 parent 057f6be commit b495d47
Showing 10 changed files with 662 additions and 36 deletions.
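The seek-related half of this change is easier to follow as one piece of control flow than as scattered hunks: when the WebKitMediaSrc element is still appending data, the seek is only recorded as pending (the same pending path already used for asynchronous state changes) and is committed later by the new notifyAppendComplete(). A simplified paraphrase of the patched MediaPlayerPrivateGStreamer::seek(), not verbatim patch code:

    // Simplified paraphrase; only calls that appear in the diffs below are used.
    if (isMediaSource() && webkit_media_src_is_appending(WEBKIT_MEDIA_SRC(m_source.get()))) {
        m_seekIsPending = true; // notifyAppendComplete() performs the seek later
    } else {
        // Safe to seek right away: tell the source element the target time,
        // flush-seek the pipeline, then let the MediaSource update its own state.
        webkit_media_src_set_seek_time(WEBKIT_MEDIA_SRC(m_source.get()), MediaTime(double(time)));
        gst_element_seek(m_playBin.get(), m_player->rate(), GST_FORMAT_TIME,
            static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
            GST_SEEK_TYPE_SET, toGstClockTime(time), GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE);
        m_mediaSource->seekToTime(time);
    }
    m_seeking = true;
    m_seekTime = time;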
10 changes: 9 additions & 1 deletion Source/WTF/wtf/MediaTime.cpp
@@ -380,7 +380,7 @@ MediaTime::operator bool() const
     return compare(zeroTime()) != EqualTo;
 }
 
-MediaTime::ComparisonFlags MediaTime::compare(const MediaTime& rhs) const
+MediaTime::ComparisonFlags MediaTime::compare(const MediaTime& rhs, bool fuzzy) const
 {
     if ((isPositiveInfinite() && rhs.isPositiveInfinite())
         || (isNegativeInfinite() && rhs.isNegativeInfinite())
@@ -409,6 +409,8 @@ MediaTime::ComparisonFlags MediaTime::compare(const MediaTime& rhs) const
     if (hasDoubleValue() && rhs.hasDoubleValue()) {
         if (m_timeValueAsDouble == rhs.m_timeValueAsDouble)
             return EqualTo;
+        if (fuzzy && fabs(m_timeValueAsDouble - rhs.m_timeValueAsDouble) <= fuzzinessThreshold().toDouble())
+            return EqualTo;
 
         return m_timeValueAsDouble < rhs.m_timeValueAsDouble ? LessThan : GreaterThan;
     }
@@ -469,6 +471,12 @@ const MediaTime& MediaTime::indefiniteTime()
     return *time;
 }
 
+const MediaTime& MediaTime::fuzzinessThreshold()
+{
+    static const MediaTime* time = new MediaTime(0.00002);
+    return *time;
+}
+
 void MediaTime::setTimeScale(int32_t timeScale)
 {
     if (hasDoubleValue()) {
3 changes: 2 additions & 1 deletion Source/WTF/wtf/MediaTime.h
@@ -85,7 +85,7 @@ class WTF_EXPORT_PRIVATE MediaTime {
         GreaterThan = 1,
     } ComparisonFlags;
 
-    ComparisonFlags compare(const MediaTime& rhs) const;
+    ComparisonFlags compare(const MediaTime& rhs, bool fuzzy = false) const;
 
     bool isValid() const { return m_timeFlags & Valid; }
     bool isInvalid() const { return !isValid(); }
@@ -100,6 +100,7 @@ class WTF_EXPORT_PRIVATE MediaTime {
     static const MediaTime& positiveInfiniteTime();
     static const MediaTime& negativeInfiniteTime();
     static const MediaTime& indefiniteTime();
+    static const MediaTime& fuzzinessThreshold();
 
     const int64_t& timeValue() const { return m_timeValue; }
     const int32_t& timeScale() const { return m_timeScale; }
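A minimal usage sketch of the new fuzzy comparison (not part of the patch; it assumes both values are double-backed, as they are when built from a double like fuzzinessThreshold() above):

    #include <wtf/MediaTime.h>

    // Two timestamps 0.00001 s apart: unequal under the strict compare,
    // equal under the fuzzy one because the gap is below the 0.00002 s threshold.
    static bool approximatelyEqual(const MediaTime& a, const MediaTime& b)
    {
        return a.compare(b, true) == MediaTime::EqualTo;
    }
    // approximatelyEqual(MediaTime(10.0), MediaTime(10.00001))               -> true
    // MediaTime(10.0).compare(MediaTime(10.00001)) == MediaTime::EqualTo     -> false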
2 changes: 1 addition & 1 deletion Source/WebCore/platform/graphics/PlatformTimeRanges.h
@@ -101,7 +101,7 @@ class PlatformTimeRanges {
 
         inline bool isContiguousWithRange(const Range& range) const
         {
-            return range.m_start == m_end || range.m_end == m_start;
+            return range.m_start.compare(m_end, true) == MediaTime::EqualTo || range.m_end.compare(m_start, true) == MediaTime::EqualTo;
         }
 
         inline Range unionWithOverlappingOrContiguousRange(const Range& range) const
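A sketch of the practical effect on buffered ranges (an illustration, not patch code; it assumes PlatformTimeRanges::add() coalesces contiguous ranges through unionWithOverlappingOrContiguousRange(), and double-backed MediaTime values):

    #include "PlatformTimeRanges.h"

    static bool reportsSingleRange()
    {
        PlatformTimeRanges buffered;
        buffered.add(MediaTime(0.0), MediaTime(4.99999));
        buffered.add(MediaTime(5.0), MediaTime(10.0));
        // With the fuzzy comparison the 0.00001 s gap no longer splits the
        // buffered attribute: both appends end up in a single [0, 10] range.
        return buffered.length() == 1;
    }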
2 changes: 2 additions & 0 deletions Source/WebCore/platform/graphics/SourceBufferPrivateClient.h
@@ -89,6 +89,8 @@ class SourceBufferPrivateClient {
     };
     virtual void sourceBufferPrivateAppendComplete(SourceBufferPrivate*, AppendResult) = 0;
     virtual void sourceBufferPrivateDidReceiveRenderingError(SourceBufferPrivate*, int errocCode) = 0;
+
+    virtual double timestampOffset() const = 0;
 };
 
 }
MediaPlayerPrivateGStreamer.cpp
@@ -302,6 +302,7 @@ MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
         reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
 
     if (m_source && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
+        webkit_media_src_set_mediaplayerprivate(WEBKIT_MEDIA_SRC(m_source.get()), 0);
         g_signal_handlers_disconnect_by_func(m_source.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateVideoChangedCallback), this);
         g_signal_handlers_disconnect_by_func(m_source.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateAudioChangedCallback), this);
         g_signal_handlers_disconnect_by_func(m_source.get(), reinterpret_cast<gpointer>(mediaPlayerPrivateTextChangedCallback), this);
@@ -607,7 +608,11 @@ void MediaPlayerPrivateGStreamer::seek(float time)
         LOG_MEDIA_MESSAGE("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
         return;
     }
-    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
+    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED
+#if ENABLE(MEDIA_SOURCE)
+        || (isMediaSource() && webkit_media_src_is_appending(WEBKIT_MEDIA_SRC(m_source.get())))
+#endif
+        || m_isEndReached) {
         m_seekIsPending = true;
         if (m_isEndReached) {
             LOG_MEDIA_MESSAGE("[Seek] reset pipeline");
@@ -624,12 +629,22 @@ void MediaPlayerPrivateGStreamer::seek(float time)
             endTime = clockTime;
         }
 
+#if ENABLE(MEDIA_SOURCE)
+        if (isMediaSource())
+            webkit_media_src_set_seek_time(WEBKIT_MEDIA_SRC(m_source.get()), MediaTime(double(time)));
+#endif
+
         // We can seek now.
         if (!gst_element_seek(m_playBin.get(), m_player->rate(), GST_FORMAT_TIME, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
             GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime)) {
             LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", time);
             return;
         }
+
+#if ENABLE(MEDIA_SOURCE)
+        if (isMediaSource())
+            m_mediaSource->seekToTime(time);
+#endif
     }
 
     m_seeking = true;
@@ -641,11 +656,6 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFlags seekType)
 {
     gint64 startTime, endTime;
 
-    // TODO: Should do more than that, need to notify the media source
-    // and probably flush the pipeline at least
-    if (isMediaSource())
-        return true;
-
     if (rate > 0) {
         startTime = position;
         endTime = GST_CLOCK_TIME_NONE;
@@ -662,8 +672,21 @@ bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFlags seekType)
     if (!rate)
         rate = 1.0;
 
-    return gst_element_seek(m_playBin.get(), rate, GST_FORMAT_TIME, seekType,
-        GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime);
+    MediaTime time = MediaTime(double(static_cast<double>(position) / GST_SECOND));
+
+    if (isMediaSource()) {
+        webkit_media_src_set_seek_time(WEBKIT_MEDIA_SRC(m_source.get()), time);
+    }
+
+    if (!gst_element_seek(m_playBin.get(), rate, GST_FORMAT_TIME, seekType,
+        GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime))
+        return false;
+
+    if (isMediaSource()) {
+        m_mediaSource->seekToTime(time);
+    }
+
+    return true;
 }
 
 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
@@ -741,7 +764,9 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
 {
     m_videoTimerHandler = 0;
     gint numTracks = 0;
+#if ENABLE(MEDIA_SOURCE)
     bool useMediaSource = false;
+#endif
     if (m_playBin) {
 #if ENABLE(MEDIA_SOURCE)
         if (m_mediaSource && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
@@ -832,7 +857,9 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
 {
     m_audioTimerHandler = 0;
     gint numTracks = 0;
+#if ENABLE(MEDIA_SOURCE)
     bool useMediaSource = false;
+#endif
     if (m_playBin) {
 #if ENABLE(MEDIA_SOURCE)
         if (m_mediaSource && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
@@ -917,7 +944,9 @@ void MediaPlayerPrivateGStreamer::textChanged()
 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
 {
     gint numTracks = 0;
+#if ENABLE(MEDIA_SOURCE)
     bool useMediaSource = false;
+#endif
     if (m_playBin) {
 #if ENABLE(MEDIA_SOURCE)
         if (m_mediaSource && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
@@ -1105,6 +1134,55 @@ struct MainThreadNeedKeyCallbackInfo {
 
 static gboolean mediaPlayerPrivateNotifyDurationChanged(MediaPlayerPrivateGStreamer* instance);
 
+#if ENABLE(MEDIA_SOURCE)
+static StreamType getStreamType(GstElement* element)
+{
+    g_return_val_if_fail(GST_IS_ELEMENT(element), STREAM_TYPE_UNKNOWN);
+
+    GstIterator* it;
+    GValue item = G_VALUE_INIT;
+    StreamType result = STREAM_TYPE_UNKNOWN;
+
+    it = gst_element_iterate_sink_pads(element);
+
+    if (it && (gst_iterator_next(it, &item)) == GST_ITERATOR_OK) {
+        GstPad* pad = GST_PAD(g_value_get_object(&item));
+        if (pad) {
+            GstCaps* caps = gst_pad_get_current_caps(pad);
+            if (caps && GST_IS_CAPS(caps)) {
+                const GstStructure* structure = gst_caps_get_structure(caps, 0);
+                if (structure) {
+                    const gchar* mediatype = gst_structure_get_name(structure);
+                    if (mediatype) {
+                        // Look for "audio", "video", "text"
+                        switch (mediatype[0]) {
+                        case 'a':
+                            result = STREAM_TYPE_AUDIO;
+                            break;
+                        case 'v':
+                            result = STREAM_TYPE_VIDEO;
+                            break;
+                        case 't':
+                            result = STREAM_TYPE_TEXT;
+                            break;
+                        default:
+                            break;
+                        }
+                    }
+                }
+                gst_caps_unref(caps);
+            }
+        }
+    }
+    g_value_unset(&item);
+
+    if (it)
+        gst_iterator_free(it);
+
+    return result;
+}
+#endif
+
 void MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
 {
     switch (GST_MESSAGE_TYPE(message)) {
@@ -1288,6 +1366,15 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
     case GST_MESSAGE_TOC:
         processTableOfContents(message);
         break;
+#endif
+#if ENABLE(MEDIA_SOURCE)
+    case GST_MESSAGE_RESET_TIME:
+        if (m_source && WEBKIT_IS_MEDIA_SRC(m_source.get())) {
+            StreamType streamType = getStreamType(GST_ELEMENT(GST_MESSAGE_SRC(message)));
+            if (streamType == STREAM_TYPE_AUDIO || streamType == STREAM_TYPE_VIDEO)
+                webkit_media_src_segment_needed(WEBKIT_MEDIA_SRC(m_source.get()), streamType);
+        }
+        break;
 #endif
     default:
         LOG_MEDIA_MESSAGE("Unhandled GStreamer message type: %s",
@@ -1491,7 +1578,13 @@ float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
 
 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
 {
-    if (!m_playBin || !m_mediaDuration || (!isMediaSource() && !totalBytes()))
+    if (!m_playBin ||
+#if ENABLE(MEDIA_SOURCE)
+        (!isMediaSource() && !totalBytes())
+#else
+        !totalBytes()
+#endif
+        || !m_mediaDuration)
         return false;
     float currentMaxTimeLoaded = maxTimeLoaded();
     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
@@ -1606,6 +1699,7 @@ void MediaPlayerPrivateGStreamer::sourceChanged()
         g_signal_connect(m_source.get(), "video-changed", G_CALLBACK(mediaPlayerPrivateVideoChangedCallback), this);
         g_signal_connect(m_source.get(), "audio-changed", G_CALLBACK(mediaPlayerPrivateAudioChangedCallback), this);
         g_signal_connect(m_source.get(), "text-changed", G_CALLBACK(mediaPlayerPrivateTextChangedCallback), this);
+        webkit_media_src_set_mediaplayerprivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
     }
 #endif
 }
@@ -1951,6 +2045,20 @@ void MediaPlayerPrivateGStreamer::didEnd()
     }
 }
 
+#if ENABLE(MEDIA_SOURCE)
+void MediaPlayerPrivateGStreamer::notifyAppendComplete()
+{
+    if (m_seekIsPending) {
+        updatePlaybackRate();
+        LOG_MEDIA_MESSAGE("[Seek] committing pending seek to %f after append completed", m_seekTime);
+        m_seekIsPending = false;
+        m_seeking = doSeek(toGstClockTime(m_seekTime), m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
+        if (!m_seeking)
+            LOG_MEDIA_MESSAGE("[Seek] seeking to %f failed", m_seekTime);
+    }
+}
+#endif
+
 void MediaPlayerPrivateGStreamer::cacheDuration()
 {
     if (m_mediaDuration || !m_mediaDurationKnown)
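The caller of notifyAppendComplete() lives in WebKitMediaSourceGStreamer.cpp, one of the changed files not shown in this excerpt; presumably it uses the player pointer registered through webkit_media_src_set_mediaplayerprivate(). A hypothetical shape of that call site, for illustration only (the function name and parameter are assumptions):

    static void appendFinished(MediaPlayerPrivateGStreamer* player)
    {
        // 'player' is the pointer handed over earlier via
        // webkit_media_src_set_mediaplayerprivate(source, this), or 0 after teardown.
        if (player)
            player->notifyAppendComplete(); // commits a seek deferred while appending
    }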
@@ -2298,6 +2406,13 @@ void MediaPlayerPrivateGStreamer::createAudioSink()
     gst_element_add_pad(audioSink, gst_ghost_pad_new("sink", pad.get()));
 
     g_object_set(m_playBin.get(), "audio-sink", audioSink, NULL);
+
+    GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_playBin.get()), "playsink"));
+    if (playsink) {
+        // The default value (0) means "send events to all the sinks", instead
+        // of "only to the first that returns true". This is needed for MSE seek.
+        g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, NULL);
+    }
 }
 
 void MediaPlayerPrivateGStreamer::createGSTPlayBin()
MediaPlayerPrivateGStreamer.h
@@ -136,6 +136,10 @@ class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateGStreamerBase {
 #endif
 
     static void notifyDurationChanged(MediaPlayerPrivateGStreamer* instance);
+    virtual bool isLiveStream() const { return m_isStreaming; }
+#if ENABLE(MEDIA_SOURCE)
+    void notifyAppendComplete();
+#endif
 
 private:
     MediaPlayerPrivateGStreamer(MediaPlayer*);
@@ -182,7 +186,6 @@ class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateGStreamerBase {
     void updatePlaybackRate();
 
     virtual String engineDescription() const { return "GStreamer"; }
-    virtual bool isLiveStream() const { return m_isStreaming; }
     virtual bool didPassCORSAccessCheck() const;
 
 #if ENABLE(ENCRYPTED_MEDIA_V2)
SourceBufferPrivateGStreamer.cpp
@@ -100,22 +100,21 @@ void SourceBufferPrivateGStreamer::setReadyState(MediaPlayer::ReadyState state)
         m_mediaSource->setReadyState(state);
 }
 
-// TODO: Implement these
-void SourceBufferPrivateGStreamer::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample> >, AtomicString)
+void SourceBufferPrivateGStreamer::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample> > samples, AtomicString trackIDString)
 {
-    notImplemented();
+    if (m_client)
+        m_client->flushAndEnqueueNonDisplayingSamples(samples, trackIDString);
 }
 
-void SourceBufferPrivateGStreamer::enqueueSample(PassRefPtr<MediaSample>, AtomicString)
+void SourceBufferPrivateGStreamer::enqueueSample(PassRefPtr<MediaSample> sample, AtomicString trackIDString)
 {
-    notImplemented();
+    if (m_client)
+        m_client->enqueueSample(sample, trackIDString);
 }
 
 bool SourceBufferPrivateGStreamer::isReadyForMoreSamples(AtomicString)
 {
-    notImplemented();
-
-    return false;
+    return true;
 }
 
 void SourceBufferPrivateGStreamer::setActive(bool isActive)
@@ -155,5 +154,13 @@ void SourceBufferPrivateGStreamer::didReceiveAllPendingSamples()
 }
 #endif
 
+double SourceBufferPrivateGStreamer::timestampOffset() const
+{
+    if (m_sourceBufferPrivateClient)
+        return m_sourceBufferPrivateClient->timestampOffset();
+    else
+        return 0.0;
+}
+
 }
 #endif
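A hypothetical consumer of the new accessor chain (the real user is on the WebKitMediaSrc side, not shown in this excerpt): shift an incoming sample by the SourceBuffer's timestampOffset before pushing it downstream. The helper name is an assumption; only timestampOffset() itself comes from the patch:

    static void applyTimestampOffset(GstBuffer* buffer, SourceBufferPrivateGStreamer* sourceBuffer)
    {
        double offset = sourceBuffer->timestampOffset(); // forwarded to SourceBufferPrivateClient
        if (offset && GST_BUFFER_PTS_IS_VALID(buffer))
            GST_BUFFER_PTS(buffer) += static_cast<gint64>(offset * GST_SECOND);
    }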
SourceBufferPrivateGStreamer.h
@@ -67,6 +67,7 @@ class SourceBufferPrivateGStreamer FINAL : public SourceBufferPrivate {
     virtual void notifyClientWhenReadyForMoreSamples(AtomicString) OVERRIDE;
     virtual bool isAborted() { return m_aborted; }
    virtual void resetAborted() { m_aborted = false; }
+    virtual double timestampOffset() const;
 
 private:
     SourceBufferPrivateGStreamer(MediaSourceGStreamer*, PassRefPtr<MediaSourceClientGStreamer>, const ContentType&);
