/*
* Copyright (C) 2007-2022 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "HTMLMediaElement.h"
#if ENABLE(VIDEO)
#include "ApplicationCacheHost.h"
#include "ApplicationCacheResource.h"
#include "Attribute.h"
#include "AudioTrackList.h"
#include "AudioTrackPrivate.h"
#include "Blob.h"
#include "BlobURL.h"
#include "CSSPropertyNames.h"
#include "CSSValueKeywords.h"
#include "Chrome.h"
#include "ChromeClient.h"
#include "CommonAtomStrings.h"
#include "CommonVM.h"
#include "ContentRuleListResults.h"
#include "ContentSecurityPolicy.h"
#include "ContentType.h"
#include "CookieJar.h"
#include "DeprecatedGlobalSettings.h"
#include "DiagnosticLoggingClient.h"
#include "DiagnosticLoggingKeys.h"
#include "Document.h"
#include "DocumentInlines.h"
#include "DocumentLoader.h"
#include "ElementChildIterator.h"
#include "EventLoop.h"
#include "EventNames.h"
#include "Frame.h"
#include "FrameLoader.h"
#include "FrameLoaderClient.h"
#include "FrameView.h"
#include "FullscreenManager.h"
#include "HTMLParserIdioms.h"
#include "HTMLSourceElement.h"
#include "HTMLTrackElement.h"
#include "HTMLVideoElement.h"
#include "ImageOverlay.h"
#include "InbandGenericTextTrack.h"
#include "InbandTextTrackPrivate.h"
#include "InbandWebVTTTextTrack.h"
#include "InspectorInstrumentation.h"
#include "JSDOMException.h"
#include "JSDOMPromiseDeferred.h"
#include "JSHTMLMediaElement.h"
#include "JSMediaControlsHost.h"
#include "LoadableTextTrack.h"
#include "Logging.h"
#include "MIMETypeRegistry.h"
#include "MediaController.h"
#include "MediaControlsHost.h"
#include "MediaDevices.h"
#include "MediaDocument.h"
#include "MediaError.h"
#include "MediaFragmentURIParser.h"
#include "MediaList.h"
#include "MediaPlayer.h"
#include "MediaQueryEvaluator.h"
#include "MediaResourceLoader.h"
#include "NavigatorMediaDevices.h"
#include "NetworkingContext.h"
#include "PODIntervalTree.h"
#include "PageGroup.h"
#include "PageInlines.h"
#include "PictureInPictureSupport.h"
#include "PlatformMediaSessionManager.h"
#include "PlatformTextTrack.h"
#include "ProgressTracker.h"
#include "Quirks.h"
#include "RegistrableDomain.h"
#include "RenderLayerCompositor.h"
#include "RenderTheme.h"
#include "RenderVideo.h"
#include "RenderView.h"
#include "ResourceLoadInfo.h"
#include "ScriptController.h"
#include "ScriptDisallowedScope.h"
#include "ScriptSourceCode.h"
#include "SecurityOriginData.h"
#include "SecurityPolicy.h"
#include "Settings.h"
#include "ShadowRoot.h"
#include "SleepDisabler.h"
#include "TextTrackCueList.h"
#include "TextTrackList.h"
#include "ThreadableBlobRegistry.h"
#include "TimeRanges.h"
#include "UserContentController.h"
#include "UserGestureIndicator.h"
#include "VideoPlaybackQuality.h"
#include "VideoTrack.h"
#include "VideoTrackList.h"
#include "VideoTrackPrivate.h"
#include "WebCoreJSClientData.h"
#include <JavaScriptCore/ScriptObject.h>
#include <JavaScriptCore/Uint8Array.h>
#include <limits>
#include <pal/SessionID.h>
#include <wtf/Algorithms.h>
#include <wtf/IsoMallocInlines.h>
#include <wtf/Language.h>
#include <wtf/MathExtras.h>
#include <wtf/MemoryPressureHandler.h>
#include <wtf/Ref.h>
#include <wtf/text/CString.h>
#if USE(AUDIO_SESSION)
#include "AudioSession.h"
#endif
#if ENABLE(WEB_AUDIO)
#include "AudioSourceProvider.h"
#include "MediaElementAudioSourceNode.h"
#endif
#if PLATFORM(IOS_FAMILY)
#include "RuntimeApplicationChecks.h"
#include "VideoFullscreenInterfaceAVKit.h"
#endif
#if HAVE(PIP_CONTROLLER)
#include "VideoFullscreenInterfacePiP.h"
#endif
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
#include "RemotePlayback.h"
#include "WebKitPlaybackTargetAvailabilityEvent.h"
#endif
#if ENABLE(MEDIA_SOURCE)
#include "DOMWindow.h"
#include "MediaSource.h"
#endif
#if ENABLE(MEDIA_STREAM)
#include "MediaStream.h"
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
#include "WebKitMediaKeyNeededEvent.h"
#include "WebKitMediaKeys.h"
#endif
#if ENABLE(ENCRYPTED_MEDIA)
#include "MediaEncryptedEvent.h"
#include "MediaKeys.h"
#include "NotImplemented.h"
#endif
#if ENABLE(VIDEO_PRESENTATION_MODE)
#include "VideoFullscreenModel.h"
#endif
#if ENABLE(MEDIA_SESSION)
#include "MediaSession.h"
#endif
#if ENABLE(MEDIA_SESSION_COORDINATOR)
#include "MediaSessionCoordinator.h"
#endif
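// Logging helper: lets URL values be passed directly to the logging macros used throughout this
// file. Long URLs are truncated when logging is enabled so log lines stay readable.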
namespace WTF {
template <>
struct LogArgument<URL> {
static String toString(const URL& url)
{
#if !LOG_DISABLED
static const unsigned maximumURLLengthForLogging = 512;
if (url.string().length() < maximumURLLengthForLogging)
return url.string();
return makeString(StringView(url.string()).left(maximumURLLengthForLogging), "...");
#else
UNUSED_PARAM(url);
return "[url]"_s;
#endif
}
};
}
namespace WebCore {
WTF_MAKE_ISO_ALLOCATED_IMPL(HTMLMediaElement);
static const Seconds SeekRepeatDelay { 100_ms };
static const double SeekTime = 0.2;
static const Seconds ScanRepeatDelay { 1.5_s };
static const double ScanMaximumRate = 8;
static const double AutoplayInterferenceTimeThreshold = 10;
static const Seconds hideMediaControlsAfterEndedDelay { 6_s };
#if ENABLE(MEDIA_SOURCE)
// URL protocol used to signal that the media source API is being used.
static constexpr auto mediaSourceBlobProtocol = "blob"_s;
#endif
#if ENABLE(MEDIA_STREAM)
// URL protocol used to signal that the media stream API is being used.
static constexpr auto mediaStreamBlobProtocol = "blob"_s;
#endif
using namespace HTMLNames;
String convertEnumerationToString(HTMLMediaElement::ReadyState enumerationValue)
{
static const NeverDestroyed<String> values[] = {
MAKE_STATIC_STRING_IMPL("HAVE_NOTHING"),
MAKE_STATIC_STRING_IMPL("HAVE_METADATA"),
MAKE_STATIC_STRING_IMPL("HAVE_CURRENT_DATA"),
MAKE_STATIC_STRING_IMPL("HAVE_FUTURE_DATA"),
MAKE_STATIC_STRING_IMPL("HAVE_ENOUGH_DATA"),
};
static_assert(static_cast<size_t>(HTMLMediaElementEnums::HAVE_NOTHING) == 0, "HTMLMediaElementEnums::HAVE_NOTHING is not 0 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::HAVE_METADATA) == 1, "HTMLMediaElementEnums::HAVE_METADATA is not 1 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::HAVE_CURRENT_DATA) == 2, "HTMLMediaElementEnums::HAVE_CURRENT_DATA is not 2 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::HAVE_FUTURE_DATA) == 3, "HTMLMediaElementEnums::HAVE_FUTURE_DATA is not 3 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::HAVE_ENOUGH_DATA) == 4, "HTMLMediaElementEnums::HAVE_ENOUGH_DATA is not 4 as expected");
ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
return values[static_cast<size_t>(enumerationValue)];
}
String convertEnumerationToString(HTMLMediaElement::NetworkState enumerationValue)
{
static const NeverDestroyed<String> values[] = {
MAKE_STATIC_STRING_IMPL("NETWORK_EMPTY"),
MAKE_STATIC_STRING_IMPL("NETWORK_IDLE"),
MAKE_STATIC_STRING_IMPL("NETWORK_LOADING"),
MAKE_STATIC_STRING_IMPL("NETWORK_NO_SOURCE"),
};
static_assert(static_cast<size_t>(HTMLMediaElementEnums::NETWORK_EMPTY) == 0, "HTMLMediaElementEnums::NETWORK_EMPTY is not 0 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::NETWORK_IDLE) == 1, "HTMLMediaElementEnums::NETWORK_IDLE is not 1 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::NETWORK_LOADING) == 2, "HTMLMediaElementEnums::NETWORK_LOADING is not 2 as expected");
static_assert(static_cast<size_t>(HTMLMediaElementEnums::NETWORK_NO_SOURCE) == 3, "HTMLMediaElementEnums::NETWORK_NO_SOURCE is not 3 as expected");
ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
return values[static_cast<size_t>(enumerationValue)];
}
String convertEnumerationToString(HTMLMediaElement::AutoplayEventPlaybackState enumerationValue)
{
static const NeverDestroyed<String> values[] = {
MAKE_STATIC_STRING_IMPL("None"),
MAKE_STATIC_STRING_IMPL("PreventedAutoplay"),
MAKE_STATIC_STRING_IMPL("StartedWithUserGesture"),
MAKE_STATIC_STRING_IMPL("StartedWithoutUserGesture"),
};
static_assert(static_cast<size_t>(HTMLMediaElement::AutoplayEventPlaybackState::None) == 0, "AutoplayEventPlaybackState::None is not 0 as expected");
static_assert(static_cast<size_t>(HTMLMediaElement::AutoplayEventPlaybackState::PreventedAutoplay) == 1, "AutoplayEventPlaybackState::PreventedAutoplay is not 1 as expected");
static_assert(static_cast<size_t>(HTMLMediaElement::AutoplayEventPlaybackState::StartedWithUserGesture) == 2, "AutoplayEventPlaybackState::StartedWithUserGesture is not 2 as expected");
static_assert(static_cast<size_t>(HTMLMediaElement::AutoplayEventPlaybackState::StartedWithoutUserGesture) == 3, "AutoplayEventPlaybackState::StartedWithoutUserGesture is not 3 as expected");
ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
return values[static_cast<size_t>(enumerationValue)];
}
String convertEnumerationToString(HTMLMediaElement::TextTrackVisibilityCheckType enumerationValue)
{
static const NeverDestroyed<String> values[] = {
MAKE_STATIC_STRING_IMPL("CheckTextTrackVisibility"),
MAKE_STATIC_STRING_IMPL("AssumeTextTrackVisibilityChanged"),
};
static_assert(static_cast<size_t>(HTMLMediaElement::TextTrackVisibilityCheckType::CheckTextTrackVisibility) == 0, "TextTrackVisibilityCheckType::CheckTextTrackVisibility is not 0 as expected");
static_assert(static_cast<size_t>(HTMLMediaElement::TextTrackVisibilityCheckType::AssumeTextTrackVisibilityChanged) == 1, "TextTrackVisibilityCheckType::AssumeTextTrackVisibilityChanged is not 1 as expected");
ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
return values[static_cast<size_t>(enumerationValue)];
}
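// RAII helper that suppresses text track display updates for its lifetime.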
class TrackDisplayUpdateScope {
public:
TrackDisplayUpdateScope(HTMLMediaElement& element)
: m_element(element)
{
m_element.beginIgnoringTrackDisplayUpdateRequests();
}
~TrackDisplayUpdateScope()
{
m_element.endIgnoringTrackDisplayUpdateRequests();
}
private:
HTMLMediaElement& m_element;
};
struct HTMLMediaElement::TrackGroup {
enum GroupKind { CaptionsAndSubtitles, Description, Chapter, Metadata, Other };
TrackGroup(GroupKind kind)
: kind(kind)
{
}
Vector<RefPtr<TextTrack>> tracks;
RefPtr<TextTrack> visibleTrack;
RefPtr<TextTrack> defaultTrack;
GroupKind kind;
bool hasSrcLang { false };
};
HashSet<HTMLMediaElement*>& HTMLMediaElement::allMediaElements()
{
static NeverDestroyed<HashSet<HTMLMediaElement*>> elements;
return elements;
}
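// Snapshot of a MediaElementSession's state, used to rank candidate sessions when choosing which
// element should be given playback controls.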
struct MediaElementSessionInfo {
const MediaElementSession* session;
MediaElementSession::PlaybackControlsPurpose purpose;
MonotonicTime timeOfLastUserInteraction;
bool canShowControlsManager : 1;
bool isVisibleInViewportOrFullscreen : 1;
bool isLargeEnoughForMainContent : 1;
bool isPlayingAudio : 1;
bool hasEverNotifiedAboutPlaying : 1;
};
static MediaElementSessionInfo mediaElementSessionInfoForSession(const MediaElementSession& session, MediaElementSession::PlaybackControlsPurpose purpose)
{
const HTMLMediaElement& element = session.element();
return {
&session,
purpose,
session.mostRecentUserInteractionTime(),
session.canShowControlsManager(purpose),
element.isFullscreen() || element.isVisibleInViewport(),
session.isLargeEnoughForMainContent(MediaSessionMainContentPurpose::MediaControls),
element.isPlaying() && element.hasAudio() && !element.muted(),
element.hasEverNotifiedAboutPlaying()
};
}
static bool preferMediaControlsForCandidateSessionOverOtherCandidateSession(const MediaElementSessionInfo& session, const MediaElementSessionInfo& otherSession)
{
MediaElementSession::PlaybackControlsPurpose purpose = session.purpose;
ASSERT(purpose == otherSession.purpose);
// For the controls manager and MediaSession, prioritize visible media over offscreen media.
if ((purpose == MediaElementSession::PlaybackControlsPurpose::ControlsManager || purpose == MediaElementSession::PlaybackControlsPurpose::MediaSession)
&& session.isVisibleInViewportOrFullscreen != otherSession.isVisibleInViewportOrFullscreen)
return session.isVisibleInViewportOrFullscreen;
// For Now Playing and MediaSession, prioritize elements that would normally satisfy main content.
if ((purpose == MediaElementSession::PlaybackControlsPurpose::NowPlaying || purpose == MediaElementSession::PlaybackControlsPurpose::MediaSession)
&& session.isLargeEnoughForMainContent != otherSession.isLargeEnoughForMainContent)
return session.isLargeEnoughForMainContent;
// For MediaSession, prioritize elements that have been played before.
if (purpose == MediaElementSession::PlaybackControlsPurpose::MediaSession
&& session.hasEverNotifiedAboutPlaying != otherSession.hasEverNotifiedAboutPlaying)
return session.hasEverNotifiedAboutPlaying;
// As a tiebreaker, prioritize elements that the user recently interacted with.
return session.timeOfLastUserInteraction > otherSession.timeOfLastUserInteraction;
}
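// Returns true when a session that is not itself a controls candidate could still be mistaken for
// main content: for NowPlaying, anything playing audible audio; for the controls manager, anything
// visible in the viewport (or fullscreen) and large enough to be main content; never for MediaSession.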
static bool mediaSessionMayBeConfusedWithMainContent(const MediaElementSessionInfo& session, MediaElementSession::PlaybackControlsPurpose purpose)
{
if (purpose == MediaElementSession::PlaybackControlsPurpose::MediaSession)
return false;
if (purpose == MediaElementSession::PlaybackControlsPurpose::NowPlaying)
return session.isPlayingAudio;
if (!session.isVisibleInViewportOrFullscreen)
return false;
if (!session.isLargeEnoughForMainContent)
return false;
// Even if this video is not a candidate, if it is visible to the user and large enough
// to be main content, it poses a risk for being confused with main content.
return true;
}
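// Volume is locked by default on iOS, where media volume is controlled by the system rather than by script.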
static bool defaultVolumeLocked()
{
#if PLATFORM(IOS)
return true;
#else
return false;
#endif
}
HTMLMediaElement::HTMLMediaElement(const QualifiedName& tagName, Document& document, bool createdByParser)
: HTMLElement(tagName, document)
, ActiveDOMObject(document)
, m_progressEventTimer(*this, &HTMLMediaElement::progressEventTimerFired)
, m_playbackProgressTimer(*this, &HTMLMediaElement::playbackProgressTimerFired)
, m_scanTimer(*this, &HTMLMediaElement::scanTimerFired)
, m_playbackControlsManagerBehaviorRestrictionsTimer(*this, &HTMLMediaElement::playbackControlsManagerBehaviorRestrictionsTimerFired)
, m_seekToPlaybackPositionEndedTimer(*this, &HTMLMediaElement::seekToPlaybackPositionEndedTimerFired)
, m_lastTimeUpdateEventMovieTime(MediaTime::positiveInfiniteTime())
, m_firstTimePlaying(true)
, m_playing(false)
, m_isWaitingUntilMediaCanStart(false)
, m_shouldDelayLoadEvent(false)
, m_haveFiredLoadedData(false)
, m_inActiveDocument(true)
, m_autoplaying(true)
, m_muted(false)
, m_explicitlyMuted(false)
, m_initiallyMuted(false)
, m_paused(true)
, m_seeking(false)
, m_seekRequested(false)
, m_wasPlayingBeforeSeeking(false)
, m_sentStalledEvent(false)
, m_sentEndEvent(false)
, m_pausedInternal(false)
, m_closedCaptionsVisible(false)
, m_webkitLegacyClosedCaptionOverride(false)
, m_completelyLoaded(false)
, m_havePreparedToPlay(false)
, m_parsingInProgress(createdByParser)
, m_elementIsHidden(document.hidden())
, m_creatingControls(false)
, m_receivedLayoutSizeChanged(false)
, m_hasEverNotifiedAboutPlaying(false)
, m_hasEverHadAudio(false)
, m_hasEverHadVideo(false)
, m_mediaControlsDependOnPageScaleFactor(false)
, m_haveSetUpCaptionContainer(false)
, m_isScrubbingRemotely(false)
, m_waitingToEnterFullscreen(false)
, m_changingVideoFullscreenMode(false)
, m_showPoster(true)
, m_tracksAreReady(true)
, m_haveVisibleTextTrack(false)
, m_processingPreferenceChange(false)
, m_shouldAudioPlaybackRequireUserGesture(document.topDocument().audioPlaybackRequiresUserGesture() && !processingUserGestureForMedia())
, m_shouldVideoPlaybackRequireUserGesture(document.topDocument().videoPlaybackRequiresUserGesture() && !processingUserGestureForMedia())
, m_volumeLocked(defaultVolumeLocked())
, m_opaqueRootProvider([this] { return opaqueRoot(); })
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
, m_remote(RemotePlayback::create(*this))
#endif
#if !RELEASE_LOG_DISABLED
, m_logger(&document.logger())
, m_logIdentifier(uniqueLogIdentifier())
#endif
#if USE(AUDIO_SESSION)
, m_categoryAtMostRecentPlayback(AudioSessionCategory::None)
#endif
{
allMediaElements().add(this);
ALWAYS_LOG(LOGIDENTIFIER);
setHasCustomStyleResolveCallbacks();
InspectorInstrumentation::addEventListenersToNode(*this);
}
void HTMLMediaElement::initializeMediaSession()
{
ASSERT(!m_mediaSession);
m_mediaSession = makeUnique<MediaElementSession>(*this);
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureForFullscreen);
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequirePageConsentToLoadMedia);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureToAutoplayToExternalDevice);
#endif
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureToControlControlsManager);
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequirePlaybackToControlControlsManager);
auto& document = this->document();
auto* page = document.page();
if (document.settings().invisibleAutoplayNotPermitted())
m_mediaSession->addBehaviorRestriction(MediaElementSession::InvisibleAutoplayNotPermitted);
if (document.settings().requiresPageVisibilityToPlayAudio())
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequirePageVisibilityToPlayAudio);
if (document.ownerElement() || !document.isMediaDocument()) {
if (m_shouldVideoPlaybackRequireUserGesture) {
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureForVideoRateChange);
if (document.settings().requiresUserGestureToLoadVideo())
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureForLoad);
}
if (page && page->isLowPowerModeEnabled())
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureForVideoDueToLowPowerMode);
if (m_shouldAudioPlaybackRequireUserGesture)
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureForAudioRateChange);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (m_shouldVideoPlaybackRequireUserGesture || m_shouldAudioPlaybackRequireUserGesture)
m_mediaSession->addBehaviorRestriction(MediaElementSession::RequireUserGestureToShowPlaybackTargetPicker);
#endif
if (!document.mediaDataLoadsAutomatically() && !document.quirks().needsPreloadAutoQuirk())
m_mediaSession->addBehaviorRestriction(MediaElementSession::AutoPreloadingNotPermitted);
if (document.settings().mainContentUserGestureOverrideEnabled())
m_mediaSession->addBehaviorRestriction(MediaElementSession::OverrideUserGestureRequirementForMainContent);
}
#if PLATFORM(IOS_FAMILY)
if (!document.settings().videoPlaybackRequiresUserGesture() && !document.settings().audioPlaybackRequiresUserGesture()) {
// Relax RequireUserGestureForFullscreen when videoPlaybackRequiresUserGesture and audioPlaybackRequiresUserGesture are not set.
m_mediaSession->removeBehaviorRestriction(MediaElementSession::RequireUserGestureForFullscreen);
}
#endif
registerWithDocument(document);
#if USE(AUDIO_SESSION) && PLATFORM(MAC)
AudioSession::sharedSession().addConfigurationChangeObserver(*this);
#endif
m_mediaSession->clientWillBeginAutoplaying();
}
HTMLMediaElement::~HTMLMediaElement()
{
ALWAYS_LOG(LOGIDENTIFIER);
beginIgnoringTrackDisplayUpdateRequests();
if (m_textTracks) {
for (unsigned i = 0; i < m_textTracks->length(); ++i) {
auto track = m_textTracks->item(i);
track->clearClient(*this);
}
}
if (m_audioTracks) {
for (unsigned i = 0; i < m_audioTracks->length(); ++i) {
auto track = m_audioTracks->item(i);
track->clearClient(*this);
}
}
if (m_videoTracks) {
for (unsigned i = 0; i < m_videoTracks->length(); ++i) {
auto track = m_videoTracks->item(i);
track->clearClient(*this);
}
}
allMediaElements().remove(this);
setShouldDelayLoadEvent(false);
unregisterWithDocument(document());
#if USE(AUDIO_SESSION) && PLATFORM(MAC)
AudioSession::sharedSession().removeConfigurationChangeObserver(*this);
#endif
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (hasEventListeners(eventNames().webkitplaybacktargetavailabilitychangedEvent) || m_remote->hasAvailabilityCallbacks()) {
m_hasPlaybackTargetAvailabilityListeners = false;
if (m_mediaSession)
m_mediaSession->setHasPlaybackTargetAvailabilityListeners(false);
updateMediaState();
}
#endif
if (m_mediaController) {
m_mediaController->removeMediaElement(*this);
m_mediaController = nullptr;
}
#if ENABLE(MEDIA_SOURCE)
detachMediaSource();
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
webkitSetMediaKeys(nullptr);
#endif
#if ENABLE(ENCRYPTED_MEDIA)
if (m_mediaKeys) {
m_mediaKeys->detachCDMClient(*this);
if (m_player)
m_player->cdmInstanceDetached(m_mediaKeys->cdmInstance());
}
#endif
if (m_isolatedWorld)
m_isolatedWorld->clearWrappers();
m_completelyLoaded = true;
if (m_player) {
m_player->invalidate();
m_player = nullptr;
}
m_mediaSession = nullptr;
schedulePlaybackControlsManagerUpdate();
if (!m_blobURLForReading.isEmpty())
ThreadableBlobRegistry::unregisterBlobURL(m_blobURLForReading);
}
std::optional<MediaPlayerIdentifier> HTMLMediaElement::playerIdentifier() const
{
return m_player ? std::optional { m_player->identifier() } : std::nullopt;
}
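// Finds the media element (optionally restricted to a document) whose session is the strongest
// candidate for showing playback controls for the given purpose; returns null when there is no
// suitable candidate.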
RefPtr<HTMLMediaElement> HTMLMediaElement::bestMediaElementForRemoteControls(MediaElementSession::PlaybackControlsPurpose purpose, const Document* document)
{
Vector<MediaElementSessionInfo> candidateSessions;
bool atLeastOneNonCandidateMayBeConfusedForMainContent = false;
PlatformMediaSessionManager::sharedManager().forEachMatchingSession([document](auto& session) {
return is<MediaElementSession>(session)
&& (!document || &downcast<MediaElementSession>(session).element().document() == document);
}, [&](auto& session) {
auto mediaElementSessionInfo = mediaElementSessionInfoForSession(downcast<MediaElementSession>(session), purpose);
if (mediaElementSessionInfo.canShowControlsManager)
candidateSessions.append(mediaElementSessionInfo);
else if (mediaSessionMayBeConfusedWithMainContent(mediaElementSessionInfo, purpose))
atLeastOneNonCandidateMayBeConfusedForMainContent = true;
});
if (!candidateSessions.size())
return nullptr;
std::sort(candidateSessions.begin(), candidateSessions.end(), preferMediaControlsForCandidateSessionOverOtherCandidateSession);
auto strongestSessionCandidate = candidateSessions.first();
if (!strongestSessionCandidate.isVisibleInViewportOrFullscreen && !strongestSessionCandidate.isPlayingAudio && atLeastOneNonCandidateMayBeConfusedForMainContent)
return nullptr;
return &strongestSessionCandidate.session->element();
}
void HTMLMediaElement::registerWithDocument(Document& document)
{
document.registerMediaElement(*this);
mediaSession().registerWithDocument(document);
if (m_isWaitingUntilMediaCanStart)
document.addMediaCanStartListener(*this);
document.registerForVisibilityStateChangedCallbacks(*this);
if (m_requireCaptionPreferencesChangedCallbacks)
document.registerForCaptionPreferencesChangedCallbacks(*this);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
document.registerForDocumentSuspensionCallbacks(*this);
#endif
document.addAudioProducer(*this);
}
void HTMLMediaElement::unregisterWithDocument(Document& document)
{
document.unregisterMediaElement(*this);
if (m_mediaSession)
m_mediaSession->unregisterWithDocument(document);
if (m_isWaitingUntilMediaCanStart)
document.removeMediaCanStartListener(*this);
document.unregisterForVisibilityStateChangedCallbacks(*this);
if (m_requireCaptionPreferencesChangedCallbacks)
document.unregisterForCaptionPreferencesChangedCallbacks(*this);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
document.unregisterForDocumentSuspensionCallbacks(*this);
#endif
document.removeAudioProducer(*this);
}
void HTMLMediaElement::didMoveToNewDocument(Document& oldDocument, Document& newDocument)
{
ALWAYS_LOG(LOGIDENTIFIER);
ASSERT_WITH_SECURITY_IMPLICATION(&document() == &newDocument);
if (m_shouldDelayLoadEvent) {
oldDocument.decrementLoadEventDelayCount();
newDocument.incrementLoadEventDelayCount();
}
unregisterWithDocument(oldDocument);
registerWithDocument(newDocument);
HTMLElement::didMoveToNewDocument(oldDocument, newDocument);
updateShouldAutoplay();
visibilityStateChanged();
}
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
void HTMLMediaElement::prepareForDocumentSuspension()
{
mediaSession().unregisterWithDocument(document());
}
void HTMLMediaElement::resumeFromDocumentSuspension()
{
mediaSession().registerWithDocument(document());
updateShouldAutoplay();
}
#endif
bool HTMLMediaElement::supportsFocus() const
{
if (document().isMediaDocument())
return false;
// If no controls are specified, we should still be able to focus the element if it has tabIndex.
return controls() || HTMLElement::supportsFocus();
}
bool HTMLMediaElement::isMouseFocusable() const
{
return false;
}
bool HTMLMediaElement::isInteractiveContent() const
{
return controls();
}
void HTMLMediaElement::attributeChanged(const QualifiedName& name, const AtomString& oldValue, const AtomString& newValue, AttributeModificationReason reason)
{
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (name == webkitwirelessvideoplaybackdisabledAttr)
mediaSession().setWirelessVideoPlaybackDisabled(newValue != nullAtom());
else
#endif
HTMLElement::attributeChanged(name, oldValue, newValue, reason);
}
void HTMLMediaElement::parseAttribute(const QualifiedName& name, const AtomString& value)
{
if (name == idAttr)
m_id = value;
if (name == srcAttr) {
// https://html.spec.whatwg.org/multipage/embedded-content.html#location-of-the-media-resource
// Location of the Media Resource
// 12 February 2017
// If a src attribute of a media element is set or changed, the user
// agent must invoke the media element's media element load algorithm.
if (!value.isNull())
prepareForLoad();
} else if (name == controlsAttr)
configureMediaControls();
else if (name == loopAttr)
updateSleepDisabling();
else if (name == preloadAttr) {
if (equalLettersIgnoringASCIICase(value, "none"_s))
m_preload = MediaPlayer::Preload::None;
else if (equalLettersIgnoringASCIICase(value, "metadata"_s))
m_preload = MediaPlayer::Preload::MetaData;
else {
// The spec does not define an "invalid value default" but "auto" is suggested as the
// "missing value default", so use it for everything except "none" and "metadata"
m_preload = MediaPlayer::Preload::Auto;
}
// The attribute must be ignored if the autoplay attribute is present
if (!autoplay() && !m_havePreparedToPlay && m_player)
m_player->setPreload(mediaSession().effectivePreloadForElement());
} else if (name == mediagroupAttr)
setMediaGroup(value);
else if (name == autoplayAttr) {
if (processingUserGestureForMedia())
removeBehaviorRestrictionsAfterFirstUserGesture();
} else if (name == titleAttr) {
if (m_mediaSession)
m_mediaSession->clientCharacteristicsChanged(false);
} else
HTMLElement::parseAttribute(name, value);
// Changing the "muted" attribue could affect ":muted"
if (name == mutedAttr)
invalidateStyle();
}
void HTMLMediaElement::finishParsingChildren()
{
HTMLElement::finishParsingChildren();
m_parsingInProgress = false;
if (childrenOfType<HTMLTrackElement>(*this).first())
scheduleConfigureTextTracks();
}
bool HTMLMediaElement::rendererIsNeeded(const RenderStyle& style)
{
return controls() && HTMLElement::rendererIsNeeded(style);
}
RenderPtr<RenderElement> HTMLMediaElement::createElementRenderer(RenderStyle&& style, const RenderTreePosition&)
{
return createRenderer<RenderMedia>(*this, WTFMove(style));
}
bool HTMLMediaElement::childShouldCreateRenderer(const Node& child) const
{
return hasShadowRootParent(child) && HTMLElement::childShouldCreateRenderer(child);
}
Node::InsertedIntoAncestorResult HTMLMediaElement::insertedIntoAncestor(InsertionType insertionType, ContainerNode& parentOfInsertedTree)
{
ALWAYS_LOG(LOGIDENTIFIER);
HTMLElement::insertedIntoAncestor(insertionType, parentOfInsertedTree);
if (insertionType.connectedToDocument)
setInActiveDocument(true);
return InsertedIntoAncestorResult::NeedsPostInsertionCallback;
}
void HTMLMediaElement::didFinishInsertingNode()
{
Ref<HTMLMediaElement> protectedThis(*this); // prepareForLoad may result in a 'beforeload' event, which can make arbitrary DOM mutations.
ALWAYS_LOG(LOGIDENTIFIER);
if (m_inActiveDocument && m_networkState == NETWORK_EMPTY && !attributeWithoutSynchronization(srcAttr).isEmpty())
prepareForLoad();
if (!m_explicitlyMuted) {
m_explicitlyMuted = true;
m_muted = hasAttributeWithoutSynchronization(mutedAttr);
mediaSession().canProduceAudioChanged();
}
configureMediaControls();
}
void HTMLMediaElement::pauseAfterDetachedTask()
{
// If we were re-inserted into an active document, no need to pause.
if (m_inActiveDocument)
return;
if (m_videoFullscreenMode != VideoFullscreenModePictureInPicture && m_networkState > NETWORK_EMPTY && !m_wasInterruptedForInvisibleAutoplay)
pause();
if (m_videoFullscreenMode == VideoFullscreenModeStandard)
exitFullscreen();
if (!m_player)
return;
size_t extraMemoryCost = m_player->extraMemoryCost();
if (extraMemoryCost > m_reportedExtraMemoryCost) {
JSC::VM& vm = commonVM();
JSC::JSLockHolder lock(vm);
size_t extraMemoryCostDelta = extraMemoryCost - m_reportedExtraMemoryCost;
m_reportedExtraMemoryCost = extraMemoryCost;
// FIXME: Adopt reportExtraMemoryVisited, and switch to reportExtraMemoryAllocated.
// https://bugs.webkit.org/show_bug.cgi?id=142595
vm.heap.deprecatedReportExtraMemory(extraMemoryCostDelta);
}
}
void HTMLMediaElement::removedFromAncestor(RemovalType removalType, ContainerNode& oldParentOfRemovedTree)
{
ALWAYS_LOG(LOGIDENTIFIER);
setInActiveDocument(false);
if (removalType.disconnectedFromDocument) {
// Pause asynchronously to let the operation that removed us finish, in case we get inserted back into a document.
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this] {
if (!isContextStopped())
pauseAfterDetachedTask();
});
}
if (m_mediaSession)
m_mediaSession->clientCharacteristicsChanged(false);
HTMLElement::removedFromAncestor(removalType, oldParentOfRemovedTree);
}
void HTMLMediaElement::willAttachRenderers()
{
ASSERT(!renderer());
}
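// Pushes the element's current state to its renderer, updates the caption display sizes, and
// notifies the player of the current content box rect.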
inline void HTMLMediaElement::updateRenderer()
{
if (auto* renderer = this->renderer())
renderer->updateFromElement();
if (m_mediaControlsHost)
m_mediaControlsHost->updateCaptionDisplaySizes();
if (m_player)
m_player->playerContentBoxRectChanged(mediaPlayerContentBoxRect());
}
void HTMLMediaElement::didAttachRenderers()
{
if (auto* renderer = this->renderer()) {
renderer->updateFromElement();
if (m_mediaSession && m_mediaSession->wantsToObserveViewportVisibilityForAutoplay())
renderer->registerForVisibleInViewportCallback();
}
updateShouldAutoplay();
}
void HTMLMediaElement::willDetachRenderers()
{
if (auto* renderer = this->renderer())
renderer->unregisterForVisibleInViewportCallback();
}
void HTMLMediaElement::didDetachRenderers()
{
updateShouldAutoplay();
}
void HTMLMediaElement::didRecalcStyle(Style::Change)
{
updateRenderer();
}
void HTMLMediaElement::scheduleNextSourceChild()
{
// Schedule a task to try the next <source> element WITHOUT resetting state the way prepareForLoad does.
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_resourceSelectionTaskCancellationGroup, std::bind(&HTMLMediaElement::loadNextSourceChild, this));
}
void HTMLMediaElement::mediaPlayerActiveSourceBuffersChanged()
{
checkForAudioAndVideo();
}
void HTMLMediaElement::scheduleEvent(const AtomString& eventName)
{
scheduleEvent(Event::create(eventName, Event::CanBubble::No, Event::IsCancelable::Yes));
}
void HTMLMediaElement::scheduleEvent(Ref<Event>&& event)
{
queueCancellableTaskToDispatchEvent(*this, TaskSource::MediaElement, m_asyncEventsCancellationGroup, WTFMove(event));
}
void HTMLMediaElement::scheduleResolvePendingPlayPromises()
{
if (m_pendingPlayPromises.isEmpty())
return;
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this, pendingPlayPromises = WTFMove(m_pendingPlayPromises)] () mutable {
if (!isContextStopped())
resolvePendingPlayPromises(WTFMove(pendingPlayPromises));
});
}
void HTMLMediaElement::scheduleRejectPendingPlayPromises(Ref<DOMException>&& error)
{
if (m_pendingPlayPromises.isEmpty())
return;
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this, error = WTFMove(error), pendingPlayPromises = WTFMove(m_pendingPlayPromises)] () mutable {
if (!isContextStopped())
rejectPendingPlayPromises(WTFMove(pendingPlayPromises), WTFMove(error));
});
}
void HTMLMediaElement::rejectPendingPlayPromises(PlayPromiseVector&& pendingPlayPromises, Ref<DOMException>&& error)
{
for (auto& promise : pendingPlayPromises)
promise.rejectType<IDLInterface<DOMException>>(error);
}
void HTMLMediaElement::resolvePendingPlayPromises(PlayPromiseVector&& pendingPlayPromises)
{
for (auto& promise : pendingPlayPromises)
promise.resolve();
}
void HTMLMediaElement::scheduleNotifyAboutPlaying()
{
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this, pendingPlayPromises = WTFMove(m_pendingPlayPromises)] () mutable {
if (!isContextStopped())
notifyAboutPlaying(WTFMove(pendingPlayPromises));
});
}
void HTMLMediaElement::notifyAboutPlaying(PlayPromiseVector&& pendingPlayPromises)
{
Ref<HTMLMediaElement> protectedThis(*this); // The 'playing' event can make arbitrary DOM mutations.
m_playbackStartedTime = currentMediaTime().toDouble();
m_hasEverNotifiedAboutPlaying = true;
dispatchEvent(Event::create(eventNames().playingEvent, Event::CanBubble::No, Event::IsCancelable::Yes));
resolvePendingPlayPromises(WTFMove(pendingPlayPromises));
schedulePlaybackControlsManagerUpdate();
}
bool HTMLMediaElement::hasEverNotifiedAboutPlaying() const
{
return m_hasEverNotifiedAboutPlaying;
}
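// Schedules a cancellable task to check that the current player can still play to the active
// wireless playback target.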
void HTMLMediaElement::scheduleCheckPlaybackTargetCompatability()
{
if (m_checkPlaybackTargetCompatibilityTaskCancellationGroup.hasPendingTask())
return;
ALWAYS_LOG(LOGIDENTIFIER);
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_checkPlaybackTargetCompatibilityTaskCancellationGroup, [this] {
checkPlaybackTargetCompatibility();
});
}
void HTMLMediaElement::checkPlaybackTargetCompatibility()
{
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (m_isPlayingToWirelessTarget && !m_player->canPlayToWirelessPlaybackTarget()) {
static const Seconds maxIntervalForWirelessPlaybackPlayerUpdate { 500_ms };
Seconds delta = MonotonicTime::now() - m_currentPlaybackTargetIsWirelessEventFiredTime;
if (delta < maxIntervalForWirelessPlaybackPlayerUpdate) {
scheduleCheckPlaybackTargetCompatability();
return;
}
ERROR_LOG(LOGIDENTIFIER, "player incompatible after ", delta.value(), ", calling setShouldPlayToPlaybackTarget(false)");
m_failedToPlayToWirelessTarget = true;
m_player->setShouldPlayToPlaybackTarget(false);
}
#endif
}
MediaError* HTMLMediaElement::error() const
{
return m_error.get();
}
void HTMLMediaElement::setSrcObject(MediaProvider&& mediaProvider)
{
// FIXME: Setting the srcObject attribute may cause other changes to the media element's internal state:
// Specifically, if srcObject is specified, the UA must use it as the source of media, even if the src
// attribute is also set or children are present. If the value of srcObject is replaced or set to null
// the UA must re-run the media element load algorithm.
//
// https://bugs.webkit.org/show_bug.cgi?id=124896
// https://www.w3.org/TR/html51/semantics-embedded-content.html#dom-htmlmediaelement-srcobject
// 4.7.14.2. Location of the media resource
// srcObject: On setting, it must set the element’s assigned media provider object to the new
// value, and then invoke the element’s media element load algorithm.
INFO_LOG(LOGIDENTIFIER);
m_mediaProvider = WTFMove(mediaProvider);
#if ENABLE(MEDIA_STREAM)
m_mediaStreamSrcObject = nullptr;
#endif
#if ENABLE(MEDIA_SOURCE)
m_mediaSource = nullptr;
#endif
m_blob = nullptr;
prepareForLoad();
}
void HTMLMediaElement::setCrossOrigin(const AtomString& value)
{
setAttributeWithoutSynchronization(crossoriginAttr, value);
}
String HTMLMediaElement::crossOrigin() const
{
return parseCORSSettingsAttribute(attributeWithoutSynchronization(crossoriginAttr));
}
HTMLMediaElement::NetworkState HTMLMediaElement::networkState() const
{
return m_networkState;
}
String HTMLMediaElement::canPlayType(const String& mimeType) const
{
MediaEngineSupportParameters parameters;
ContentType contentType(mimeType);
parameters.type = contentType;
parameters.contentTypesRequiringHardwareSupport = mediaContentTypesRequiringHardwareSupport();
parameters.allowedMediaContainerTypes = allowedMediaContainerTypes();
parameters.allowedMediaCodecTypes = allowedMediaCodecTypes();
parameters.allowedMediaVideoCodecIDs = allowedMediaVideoCodecIDs();
parameters.allowedMediaAudioCodecIDs = allowedMediaAudioCodecIDs();
parameters.allowedMediaCaptionFormatTypes = allowedMediaCaptionFormatTypes();
MediaPlayer::SupportsType support = MediaPlayer::supportsType(parameters);
String canPlay;
// 4.8.10.3
switch (support) {
case MediaPlayer::SupportsType::IsNotSupported:
canPlay = emptyString();
break;
case MediaPlayer::SupportsType::MayBeSupported:
canPlay = "maybe"_s;
break;
case MediaPlayer::SupportsType::IsSupported:
canPlay = "probably"_s;
break;
}
ALWAYS_LOG(LOGIDENTIFIER, mimeType, ": ", canPlay);
return canPlay;
}
WallTime HTMLMediaElement::getStartDate() const
{
if (!m_player)
return WallTime::nan();
return WallTime::fromRawSeconds(m_player->getStartDate().toDouble());
}
void HTMLMediaElement::load()
{
Ref<HTMLMediaElement> protectedThis(*this); // prepareForLoad may result in a 'beforeload' event, which can make arbitrary DOM mutations.
INFO_LOG(LOGIDENTIFIER);
if (m_videoFullscreenMode == VideoFullscreenModePictureInPicture && document().quirks().requiresUserGestureToLoadInPictureInPicture() && !document().processingUserGestureForMedia())
return;
prepareForLoad();
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_resourceSelectionTaskCancellationGroup, std::bind(&HTMLMediaElement::prepareToPlay, this));
}
void HTMLMediaElement::prepareForLoad()
{
// https://html.spec.whatwg.org/multipage/embedded-content.html#media-element-load-algorithm
// The Media Element Load Algorithm
// 12 February 2017
ALWAYS_LOG(LOGIDENTIFIER, "gesture = ", processingUserGestureForMedia());
if (processingUserGestureForMedia())
removeBehaviorRestrictionsAfterFirstUserGesture();
// 1 - Abort any already-running instance of the resource selection algorithm for this element.
// Perform the cleanup required for the resource load algorithm to run.
stopPeriodicTimers();
m_resourceSelectionTaskCancellationGroup.cancel();
// FIXME: Figure out appropriate place to reset LoadTextTrackResource if necessary and set m_pendingActionFlags to 0 here.
m_sentEndEvent = false;
m_sentStalledEvent = false;
m_haveFiredLoadedData = false;
m_completelyLoaded = false;
m_havePreparedToPlay = false;
m_currentIdentifier = MediaUniqueIdentifier::generate();
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
m_failedToPlayToWirelessTarget = false;
#endif
m_loadState = WaitingForSource;
m_currentSourceNode = nullptr;
if (!document().hasBrowsingContext())
return;
createMediaPlayer();
// 2 - Let pending tasks be a list of all tasks from the media element's media element event task source in one of the task queues.
// 3 - For each task in pending tasks that would resolve pending play promises or reject pending play promises, immediately resolve or reject those promises in the order the corresponding tasks were queued.
// 4 - Remove each task in pending tasks from its task queue
cancelPendingEventsAndCallbacks();
// 5 - If the media element's networkState is set to NETWORK_LOADING or NETWORK_IDLE, queue
// a task to fire a simple event named abort at the media element.
if (m_networkState == NETWORK_LOADING || m_networkState == NETWORK_IDLE)
scheduleEvent(eventNames().abortEvent);
// 6 - If the media element's networkState is not set to NETWORK_EMPTY, then run these substeps
if (m_networkState != NETWORK_EMPTY) {
// 6.1 - Queue a task to fire a simple event named emptied at the media element.
scheduleEvent(eventNames().emptiedEvent);
// 6.2 - If a fetching process is in progress for the media element, the user agent should stop it.
m_networkState = NETWORK_EMPTY;
// 6.3 - If the media element’s assigned media provider object is a MediaSource object, then detach it.
#if ENABLE(MEDIA_SOURCE)
detachMediaSource();
#endif
// 6.4 - Forget the media element's media-resource-specific tracks.
forgetResourceSpecificTracks();
// 6.5 - If readyState is not set to HAVE_NOTHING, then set it to that state.
m_readyState = HAVE_NOTHING;
m_readyStateMaximum = HAVE_NOTHING;
// 6.6 - If the paused attribute is false, then set it to true.
setPaused(true);
// 6.7 - If seeking is true, set it to false.
clearSeeking();
// 6.8 - Set the current playback position to 0.
// Set the official playback position to 0.
// If this changed the official playback position, then queue a task to fire a simple event named timeupdate at the media element.
m_lastSeekTime = MediaTime::zeroTime();
m_playedTimeRanges = TimeRanges::create();
// FIXME: Add support for firing this event. e.g., scheduleEvent(eventNames().timeUpdateEvent);
// 4.9 - Set the initial playback position to 0.
// FIXME: Make this less subtle. The position only becomes 0 because of the createMediaPlayer() call
// above.
refreshCachedTime();
invalidateCachedTime();
// 4.10 - Set the timeline offset to Not-a-Number (NaN).
// 4.11 - Update the duration attribute to Not-a-Number (NaN).
updateMediaController();
updateActiveTextTrackCues(MediaTime::zeroTime());
}
// 7 - Set the playbackRate attribute to the value of the defaultPlaybackRate attribute.
setPlaybackRate(defaultPlaybackRate());
// 8 - Set the error attribute to null and the autoplaying flag to true.
m_error = nullptr;
m_autoplaying = true;
mediaSession().clientWillBeginAutoplaying();
if (!MediaPlayer::isAvailable())
noneSupported();
else {
// 9 - Invoke the media element's resource selection algorithm.
// Note, unless the restriction on requiring user action has been removed,
// do not begin downloading data.
if (mediaSession().dataLoadingPermitted())
selectMediaResource();
}
// 10 - Note: Playback of any previously playing media resource for this element stops.
configureMediaControls();
}
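// Reloads the media, re-enters fullscreen if needed, restores the previous playback position once
// the new player can seek, and resumes playback if the element was not paused.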
void HTMLMediaElement::mediaPlayerReloadAndResumePlaybackIfNeeded()
{
auto previousMediaTime = m_cachedTime;
bool wasPaused = paused();
load();
if (m_videoFullscreenMode != VideoFullscreenModeNone)
enterFullscreen(m_videoFullscreenMode);
if (previousMediaTime) {
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_resourceSelectionTaskCancellationGroup, [this, previousMediaTime] {
if (m_player)
m_player->seekWhenPossible(previousMediaTime);
});
}
if (!wasPaused)
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_resourceSelectionTaskCancellationGroup, std::bind(&HTMLMediaElement::playInternal, this));
}
void HTMLMediaElement::selectMediaResource()
{
// https://www.w3.org/TR/2016/REC-html51-20161101/semantics-embedded-content.html#resource-selection-algorithm
// The Resource Selection Algorithm
// 1. Set the element’s networkState attribute to the NETWORK_NO_SOURCE value.
m_networkState = NETWORK_NO_SOURCE;
// 2. Set the element’s show poster flag to true.
setShowPosterFlag(true);
// 3. Set the media element’s delaying-the-load-event flag to true (this delays the load event).
setShouldDelayLoadEvent(true);
// 4. In parallel, await a stable state, allowing the task that invoked this algorithm to continue.
if (m_resourceSelectionTaskCancellationGroup.hasPendingTask())
return;
if (!mediaSession().pageAllowsDataLoading()) {
ALWAYS_LOG(LOGIDENTIFIER, "not allowed to load in background, waiting");
setShouldDelayLoadEvent(false);
if (m_isWaitingUntilMediaCanStart)
return;
m_isWaitingUntilMediaCanStart = true;
document().addMediaCanStartListener(*this);
return;
}
// Once the page has allowed an element to load media, it is free to load at will. This allows a
// playlist that starts in a foreground tab to continue automatically if the tab is subsequently
// put into the background.
mediaSession().removeBehaviorRestriction(MediaElementSession::RequirePageConsentToLoadMedia);
auto logSiteIdentifier = LOGIDENTIFIER;
UNUSED_PARAM(logSiteIdentifier);
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_resourceSelectionTaskCancellationGroup, [this, logSiteIdentifier] {
ALWAYS_LOG(logSiteIdentifier, "lambda(), task fired");
// 5. If the media element’s blocked-on-parser flag is false, then populate the list of pending text tracks.
// HTMLMediaElement::textTracksAreReady will need "... the text tracks whose mode was not in the
// disabled state when the element's resource selection algorithm last started".
// FIXME: Update this to match "populate the list of pending text tracks" step.
m_textTracksWhenResourceSelectionBegan.clear();
if (m_textTracks) {
for (unsigned i = 0; i < m_textTracks->length(); ++i) {
RefPtr<TextTrack> track = m_textTracks->item(i);
if (track->mode() != TextTrack::Mode::Disabled)
m_textTracksWhenResourceSelectionBegan.append(track);
}
}
enum Mode { None, Object, Attribute, Children };
Mode mode = None;
if (m_mediaProvider) {
// 6. If the media element has an assigned media provider object, then let mode be object.
mode = Object;
} else if (hasAttributeWithoutSynchronization(srcAttr)) {
// Otherwise, if the media element has no assigned media provider object but has a src attribute, then let mode be attribute.
mode = Attribute;
ASSERT(m_player);
if (!m_player) {
ERROR_LOG(logSiteIdentifier, "has srcAttr but m_player is not created");
return;
}
} else if (auto firstSource = childrenOfType<HTMLSourceElement>(*this).first()) {
// Otherwise, if the media element does not have an assigned media provider object and does not have a src attribute,
// but does have a source element child, then let mode be children and let candidate be the first such source element
// child in tree order.
mode = Children;
m_nextChildNodeToConsider = firstSource;
m_currentSourceNode = nullptr;
} else {
// Otherwise the media element has no assigned media provider object and has neither a src attribute nor a source
// element child: set the networkState to NETWORK_EMPTY, and abort these steps; the synchronous section ends.
m_loadState = WaitingForSource;
setShouldDelayLoadEvent(false);
m_networkState = NETWORK_EMPTY;
ALWAYS_LOG(logSiteIdentifier, "nothing to load");
return;
}
// 7. Set the media element’s networkState to NETWORK_LOADING.
m_networkState = NETWORK_LOADING;
// 8. Queue a task to fire a simple event named loadstart at the media element.
scheduleEvent(eventNames().loadstartEvent);
// 9. Run the appropriate steps from the following list:
// ↳ If mode is object
if (mode == Object) {
// 1. Set the currentSrc attribute to the empty string.
setCurrentSrc(URL());
// 2. End the synchronous section, continuing the remaining steps in parallel.
// 3. Run the resource fetch algorithm with the assigned media provider object.
switchOn(m_mediaProvider.value(),
#if ENABLE(MEDIA_STREAM)
[this](RefPtr<MediaStream> stream) { m_mediaStreamSrcObject = stream; },
#endif
#if ENABLE(MEDIA_SOURCE)
[this](RefPtr<MediaSource> source) { m_mediaSource = source; },
#endif
[this](RefPtr<Blob> blob) { m_blob = blob; }
);
ContentType contentType;
loadResource(URL(), contentType, String());
ALWAYS_LOG(logSiteIdentifier, "using 'srcObject' property");
// If that algorithm returns without aborting this one, then the load failed.
// 4. Failed with media provider: Reaching this step indicates that the media resource
// failed to load. Queue a task to run the dedicated media source failure steps.
// 5. Wait for the task queued by the previous step to have executed.
// 6. Abort these steps. The element won’t attempt to load another resource until this
// algorithm is triggered again.
return;
}
// ↳ If mode is attribute
if (mode == Attribute) {
m_loadState = LoadingFromSrcAttr;
// 1. If the src attribute’s value is the empty string, then end the synchronous section,
// and jump down to the failed with attribute step below.
// 2. Let absolute URL be the absolute URL that would have resulted from parsing the URL
// specified by the src attribute’s value relative to the media element when the src
// attribute was last changed.
URL absoluteURL = getNonEmptyURLAttribute(srcAttr);
if (absoluteURL.isEmpty()) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
ALWAYS_LOG(logSiteIdentifier, "empty 'src'");
return;
}
if (!isSafeToLoadURL(absoluteURL, Complain)) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
// 3. If absolute URL was obtained successfully, set the currentSrc attribute to absolute URL.
setCurrentSrc(absoluteURL);
// 4. End the synchronous section, continuing the remaining steps in parallel.
// 5. If absolute URL was obtained successfully, run the resource fetch algorithm with absolute
// URL. If that algorithm returns without aborting this one, then the load failed.
// No type or key system information is available when the URL comes
// from the 'src' attribute, so MediaPlayer will have to pick a media
// engine based on the file extension.
ContentType contentType;
loadResource(absoluteURL, contentType, String());
ALWAYS_LOG(logSiteIdentifier, "using 'src' attribute url");
// 6. Failed with attribute: Reaching this step indicates that the media resource failed to load
// or that the given URL could not be resolved. Queue a task to run the dedicated media source failure steps.
// 7. Wait for the task queued by the previous step to have executed.
// 8. Abort these steps. The element won’t attempt to load another resource until this algorithm is triggered again.
return;
}
// ↳ Otherwise (mode is children)
// (Ctd. in loadNextSourceChild())
loadNextSourceChild();
});
}
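// Continuation of the resource selection algorithm for mode 'children': selects the next viable
// <source> child, recreates the media player, and runs the resource fetch algorithm with its URL;
// if no viable source remains, waits for a source change.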
void HTMLMediaElement::loadNextSourceChild()
{
ContentType contentType;
String keySystem;
URL mediaURL = selectNextSourceChild(&contentType, &keySystem, Complain);
if (!mediaURL.isValid()) {
waitForSourceChange();
return;
}
// Recreate the media player for the new url
createMediaPlayer();
m_loadState = LoadingFromSourceElement;
loadResource(mediaURL, contentType, keySystem);
}
void HTMLMediaElement::loadResource(const URL& initialURL, ContentType& contentType, const String& keySystem)
{
ASSERT(initialURL.isEmpty() || isSafeToLoadURL(initialURL, Complain));
INFO_LOG(LOGIDENTIFIER, initialURL, contentType, keySystem);
RefPtr<Frame> frame = document().frame();
if (!frame) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
Page* page = frame->page();
if (!page) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
URL url = initialURL;
#if PLATFORM(COCOA)
if (url.isLocalFile() && !frame->loader().willLoadMediaElementURL(url, *this)) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
#elif USE(GSTREAMER)
if (!url.isEmpty() && !frame->loader().willLoadMediaElementURL(url, *this)) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
#endif
#if ENABLE(CONTENT_EXTENSIONS)
if (RefPtr documentLoader = frame->loader().documentLoader()) {
if (page->userContentProvider().processContentRuleListsForLoad(*page, url, ContentExtensions::ResourceType::Media, *documentLoader).summary.blockedLoad) {
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
return;
}
}
#endif
// The resource fetch algorithm
m_networkState = NETWORK_LOADING;
// If the URL should be loaded from the application cache, pass the URL of the cached file to the media engine.
ApplicationCacheResource* resource = nullptr;
if (!url.isEmpty() && frame->loader().documentLoader()->applicationCacheHost().shouldLoadResourceFromApplicationCache(ResourceRequest(url), resource)) {
// Resources that are not present in the manifest will always fail to load (at least, after the
// cache has been primed the first time), making the testing of offline applications simpler.
if (!resource || resource->path().isEmpty()) {
mediaLoadingFailed(MediaPlayer::NetworkState::NetworkError);
return;
}
}
// Log that we started loading a media element.
page->diagnosticLoggingClient().logDiagnosticMessage(isVideo() ? DiagnosticLoggingKeys::videoKey() : DiagnosticLoggingKeys::audioKey(), DiagnosticLoggingKeys::loadingKey(), ShouldSample::No);
m_firstTimePlaying = true;
// Set m_currentSrc *before* changing to the cache URL; the fact that we are loading from the app
// cache is an internal detail not exposed through the media element API.
setCurrentSrc(url);
if (resource) {
url = ApplicationCacheHost::createFileURL(resource->path());
INFO_LOG(LOGIDENTIFIER, "will load from app cache ", url);
}
INFO_LOG(LOGIDENTIFIER, "m_currentSrc is ", m_currentSrc);
startProgressEventTimer();
bool privateMode = document().page() && document().page()->usesEphemeralSession();
m_player->setPrivateBrowsingMode(privateMode);
if (!autoplay() && !m_havePreparedToPlay)
m_player->setPreload(mediaSession().effectivePreloadForElement());
m_player->setPreservesPitch(m_webkitPreservesPitch);
m_player->setPitchCorrectionAlgorithm(document().settings().pitchCorrectionAlgorithm());
if (!m_explicitlyMuted) {
m_explicitlyMuted = true;
m_muted = hasAttributeWithoutSynchronization(mutedAttr);
mediaSession().canProduceAudioChanged();
}
updateVolume();
bool loadAttempted = false;
#if ENABLE(MEDIA_SOURCE)
if (!m_mediaSource && url.protocolIs(mediaSourceBlobProtocol))
m_mediaSource = MediaSource::lookup(url.string());
if (m_mediaSource) {
loadAttempted = true;
ALWAYS_LOG(LOGIDENTIFIER, "loading MSE blob");
if (!m_mediaSource->attachToElement(*this)) {
// Forget our reference to the MediaSource, so we leave it alone
// while processing remainder of load failure.
m_mediaSource = nullptr;
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
} else if (!m_player->load(url, contentType, *m_mediaSource)) {
// We have to detach the MediaSource before we forget the reference to it.
m_mediaSource->detachFromElement(*this);
m_mediaSource = nullptr;
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
}
}
#endif
#if ENABLE(MEDIA_STREAM)
if (!loadAttempted && m_mediaStreamSrcObject) {
loadAttempted = true;
ALWAYS_LOG(LOGIDENTIFIER, "loading media stream blob ", m_mediaStreamSrcObject->logIdentifier());
if (!m_player->load(m_mediaStreamSrcObject->privateStream()))
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
}
#endif
if (!loadAttempted && m_blob) {
loadAttempted = true;
ALWAYS_LOG(LOGIDENTIFIER, "loading generic blob");
if (!m_blobURLForReading.isEmpty())
ThreadableBlobRegistry::unregisterBlobURL(m_blobURLForReading);
m_blobURLForReading = BlobURL::createPublicURL(&document().securityOrigin());
ThreadableBlobRegistry::registerBlobURL(&document().securityOrigin(), document().policyContainer(), m_blobURLForReading, m_blob->url());
if (!m_player->load(m_blobURLForReading, contentType, keySystem))
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
}
if (!loadAttempted && !m_player->load(url, contentType, keySystem))
mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
mediaPlayerRenderingModeChanged();
}
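// Per-element text track cue bookkeeping: an interval tree of cues keyed by their start/end media
// times, plus the list of currently active cues.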
struct HTMLMediaElement::CueData {
WTF_MAKE_STRUCT_FAST_ALLOCATED;
PODIntervalTree<MediaTime, TextTrackCue*> cueTree;
CueList currentlyActiveCues;
};
static bool trackIndexCompare(TextTrack* a, TextTrack* b)
{
return a->trackIndex() - b->trackIndex() < 0;
}
static bool eventTimeCueCompare(const std::pair<MediaTime, TextTrackCue*>& a, const std::pair<MediaTime, TextTrackCue*>& b)
{
// 12 - Sort the tasks in events in ascending time order (tasks with earlier
// times first).
if (a.first != b.first)
return a.first - b.first < MediaTime::zeroTime();
// If the cues belong to different text tracks, it doesn't make sense to
// compare the two tracks by the relative cue order, so return the relative
// track order.
if (a.second->track() != b.second->track())
return trackIndexCompare(a.second->track(), b.second->track());
// 12 - Further sort tasks in events that have the same time by the
// relative text track cue order of the text track cues associated
// with these tasks.
return a.second->isOrderedBefore(b.second);
}
static bool compareCueInterval(const CueInterval& one, const CueInterval& two)
{
return one.data()->isOrderedBefore(two.data());
}
static bool compareCueIntervalEndTime(const CueInterval& one, const CueInterval& two)
{
return one.data()->endMediaTime() > two.data()->endMediaTime();
}
bool HTMLMediaElement::ignoreTrackDisplayUpdateRequests() const
{
return m_ignoreTrackDisplayUpdate > 0 || !m_textTracks || !m_cueData || m_cueData->cueTree.isEmpty();
}
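// Implements the cue-related portion of the "time marches on" steps: computes
// the current, previous, and missed cue sets for movieTime, queues enter/exit
// events on the cues and cuechange events on their tracks, then updates each
// cue's active flag. The numbered comments below follow the spec's step
// numbering. The algorithm reschedules itself via
// MediaPlayer::performTaskAtMediaTime() for the next time a cue starts or ends.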
void HTMLMediaElement::updateActiveTextTrackCues(const MediaTime& movieTime)
{
// 4.8.10.8 Playing the media resource
// If the current playback position changes while the steps are running,
// then the user agent must wait for the steps to complete, and then must
// immediately rerun the steps.
if (ignoreTrackDisplayUpdateRequests())
return;
// 1 - Let current cues be a list of cues, initialized to contain all the
// cues of all the hidden, showing, or showing by default text tracks of the
// media element (not the disabled ones) whose start times are less than or
// equal to the current playback position and whose end times are greater
// than the current playback position.
CueList currentCues;
// The user agent must synchronously unset [the text track cue active] flag
// whenever ... the media element's readyState is changed back to HAVE_NOTHING.
if (m_readyState != HAVE_NOTHING && m_player) {
for (auto& cue : m_cueData->cueTree.allOverlaps({ movieTime, movieTime })) {
if (cue.low() <= movieTime && cue.high() > movieTime)
currentCues.append(cue);
}
if (currentCues.size() > 1)
std::sort(currentCues.begin(), currentCues.end(), &compareCueInterval);
}
CueList previousCues;
CueList missedCues;
// 2 - Let other cues be a list of cues, initialized to contain all the cues
// of hidden, showing, and showing by default text tracks of the media
// element that are not present in current cues.
previousCues = m_cueData->currentlyActiveCues;
// 3 - Let last time be the current playback position at the time this
// algorithm was last run for this media element, if this is not the first
// time it has run.
MediaTime lastTime = m_lastTextTrackUpdateTime;
// 4 - If the current playback position has, since the last time this
// algorithm was run, only changed through its usual monotonic increase
// during normal playback, then let missed cues be the list of cues in other
// cues whose start times are greater than or equal to last time and whose
// end times are less than or equal to the current playback position.
// Otherwise, let missed cues be an empty list.
if (lastTime >= MediaTime::zeroTime() && m_lastSeekTime < movieTime) {
for (auto& cue : m_cueData->cueTree.allOverlaps({ lastTime, movieTime })) {
// Consider cues that may have been missed since the last seek time.
if (cue.low() > std::max(m_lastSeekTime, lastTime) && cue.high() < movieTime)
missedCues.append(cue);
}
}
m_lastTextTrackUpdateTime = movieTime;
// 5 - If the time was reached through the usual monotonic increase of the
// current playback position during normal playback, and if the user agent
// has not fired a timeupdate event at the element in the past 15 to 250ms
// and is not still running event handlers for such an event, then the user
// agent must queue a task to fire a simple event named timeupdate at the
// element. (In the other cases, such as explicit seeks, relevant events get
// fired as part of the overall process of changing the current playback
// position.)
if (!m_paused && m_lastSeekTime <= lastTime)
scheduleTimeupdateEvent(false);
// Explicitly cache vector sizes, as their content is constant from here.
size_t currentCuesSize = currentCues.size();
size_t missedCuesSize = missedCues.size();
size_t previousCuesSize = previousCues.size();
// 6 - If all of the cues in current cues have their text track cue active
// flag set, none of the cues in other cues have their text track cue active
// flag set, and missed cues is empty, then abort these steps.
bool activeSetChanged = missedCuesSize;
for (size_t i = 0; !activeSetChanged && i < previousCuesSize; ++i)
if (!currentCues.contains(previousCues[i]) && previousCues[i].data()->isActive())
activeSetChanged = true;
for (size_t i = 0; i < currentCuesSize; ++i) {
RefPtr<TextTrackCue> cue = currentCues[i].data();
cue->updateDisplayTree(movieTime);
if (!cue->isActive())
activeSetChanged = true;
}
MediaTime nextInterestingTime = MediaTime::invalidTime();
// std::min_element() returns end() for an empty range, so only dereference it when there are current cues.
if (auto nearestEndingCue = std::min_element(currentCues.begin(), currentCues.end(), compareCueIntervalEndTime); nearestEndingCue != currentCues.end())
nextInterestingTime = nearestEndingCue->data()->endMediaTime();
std::optional<CueInterval> nextCue = m_cueData->cueTree.nextIntervalAfter(movieTime);
if (nextCue)
nextInterestingTime = std::min(nextInterestingTime, nextCue->low());
auto identifier = LOGIDENTIFIER;
INFO_LOG(identifier, "nextInterestingTime:", nextInterestingTime);
if (nextInterestingTime.isValid() && m_player) {
m_player->performTaskAtMediaTime([this, weakThis = WeakPtr { *this }, identifier] {
if (!weakThis)
return;
auto currentMediaTime = this->currentMediaTime();
INFO_LOG(identifier, "lambda(), currentMediaTime: ", currentMediaTime);
this->updateActiveTextTrackCues(currentMediaTime);
}, nextInterestingTime);
}
if (!activeSetChanged)
return;
// 7 - If the time was reached through the usual monotonic increase of the
// current playback position during normal playback, and there are cues in
// other cues that have their text track cue pause-on-exit flag set and that
// either have their text track cue active flag set or are also in missed
// cues, then immediately pause the media element.
for (size_t i = 0; !m_paused && i < previousCuesSize; ++i) {
if (previousCues[i].data()->pauseOnExit()
&& previousCues[i].data()->isActive()
&& !currentCues.contains(previousCues[i]))
pause();
}
for (size_t i = 0; !m_paused && i < missedCuesSize; ++i) {
if (missedCues[i].data()->pauseOnExit())
pause();
}
// 8 - Let events be a list of tasks, initially empty. Each task in this
// list will be associated with a text track, a text track cue, and a time,
// which are used to sort the list before the tasks are queued.
Vector<std::pair<MediaTime, TextTrackCue*>> eventTasks;
// 8 - Let affected tracks be a list of text tracks, initially empty.
Vector<TextTrack*> affectedTracks;
for (size_t i = 0; i < missedCuesSize; ++i) {
// 9 - For each text track cue in missed cues, prepare an event named enter
// for the TextTrackCue object with the text track cue start time.
eventTasks.append({ missedCues[i].data()->startMediaTime(), missedCues[i].data() });
// 10 - For each text track [...] in missed cues, prepare an event
// named exit for the TextTrackCue object with the later of
// the text track cue end time and the text track cue start time.
// Note: An explicit task is added only if the cue is NOT a zero or
// negative length cue. Otherwise, the need for an exit event is
// checked when these tasks are actually queued below. This doesn't
// affect sorting events before dispatch either, because the exit
// event has the same time as the enter event.
if (missedCues[i].data()->startMediaTime() < missedCues[i].data()->endMediaTime())
eventTasks.append({ missedCues[i].data()->endMediaTime(), missedCues[i].data() });
}
for (size_t i = 0; i < previousCuesSize; ++i) {
// 10 - For each text track cue in other cues that has its text
// track cue active flag set prepare an event named exit for the
// TextTrackCue object with the text track cue end time.
if (!currentCues.contains(previousCues[i]))
eventTasks.append({ previousCues[i].data()->endMediaTime(), previousCues[i].data() });
}
for (size_t i = 0; i < currentCuesSize; ++i) {
// 11 - For each text track cue in current cues that does not have its
// text track cue active flag set, prepare an event named enter for the
// TextTrackCue object with the text track cue start time.
if (!previousCues.contains(currentCues[i]))
eventTasks.append({ currentCues[i].data()->startMediaTime(), currentCues[i].data() });
}
// 12 - Sort the tasks in events in ascending time order (tasks with earlier
// times first).
std::sort(eventTasks.begin(), eventTasks.end(), eventTimeCueCompare);
for (auto& eventTask : eventTasks) {
if (!affectedTracks.contains(eventTask.second->track()))
affectedTracks.append(eventTask.second->track());
// 13 - Queue each task in events, in list order.
// Each task in eventTasks corresponds to either an enter event or an exit
// event. The time associated with the task identifies which one, but only
// when the cue's startTime is less than its endTime; zero- and negative-length
// cues get both events queued here.
if (eventTask.second->startTime() >= eventTask.second->endTime()) {
auto enterEvent = Event::create(eventNames().enterEvent, Event::CanBubble::No, Event::IsCancelable::No);
scheduleEventOn(*eventTask.second, WTFMove(enterEvent));
auto exitEvent = Event::create(eventNames().exitEvent, Event::CanBubble::No, Event::IsCancelable::No);
scheduleEventOn(*eventTask.second, WTFMove(exitEvent));
} else {
RefPtr<Event> event;
if (eventTask.first == eventTask.second->startMediaTime())
event = Event::create(eventNames().enterEvent, Event::CanBubble::No, Event::IsCancelable::No);
else
event = Event::create(eventNames().exitEvent, Event::CanBubble::No, Event::IsCancelable::No);
scheduleEventOn(*eventTask.second, event.releaseNonNull());
}
}
// 14 - Sort affected tracks in the same order as the text tracks appear in
// the media element's list of text tracks, and remove duplicates.
std::sort(affectedTracks.begin(), affectedTracks.end(), trackIndexCompare);
// 15 - For each text track in affected tracks, in the list order, queue a
// task to fire a simple event named cuechange at the TextTrack object, and, ...
for (auto& affectedTrack : affectedTracks) {
auto event = Event::create(eventNames().cuechangeEvent, Event::CanBubble::No, Event::IsCancelable::No);
scheduleEventOn(*affectedTrack, WTFMove(event));
// ... if the text track has a corresponding track element, to then fire a
// simple event named cuechange at the track element as well.
if (is<LoadableTextTrack>(*affectedTrack)) {
auto event = Event::create(eventNames().cuechangeEvent, Event::CanBubble::No, Event::IsCancelable::No);
RefPtr trackElement = downcast<LoadableTextTrack>(*affectedTrack).trackElement();
ASSERT(trackElement);
scheduleEventOn(*trackElement, WTFMove(event));
}
}
// 16 - Set the text track cue active flag of all the cues in the current
// cues, and unset the text track cue active flag of all the cues in the
// other cues.
for (size_t i = 0; i < currentCuesSize; ++i)
currentCues[i].data()->setIsActive(true);
for (size_t i = 0; i < previousCuesSize; ++i)
if (!currentCues.contains(previousCues[i]))
previousCues[i].data()->setIsActive(false);
// Update the current active cues.
m_cueData->currentlyActiveCues = currentCues;
if (activeSetChanged)
updateTextTrackDisplay();
}
void HTMLMediaElement::audioTrackEnabledChanged(AudioTrack& track)
{
if (m_audioTracks && m_audioTracks->contains(track))
m_audioTracks->scheduleChangeEvent();
if (processingUserGestureForMedia())
removeBehaviorRestrictionsAfterFirstUserGesture(MediaElementSession::AllRestrictions & ~MediaElementSession::RequireUserGestureToControlControlsManager);
checkForAudioAndVideo();
}
void HTMLMediaElement::audioTrackKindChanged(AudioTrack& track)
{
if (m_audioTracks && m_audioTracks->contains(track))
m_audioTracks->scheduleChangeEvent();
}
void HTMLMediaElement::audioTrackLabelChanged(AudioTrack& track)
{
if (m_audioTracks && m_audioTracks->contains(track))
m_audioTracks->scheduleChangeEvent();
}
void HTMLMediaElement::audioTrackLanguageChanged(AudioTrack& track)
{
if (m_audioTracks && m_audioTracks->contains(track))
m_audioTracks->scheduleChangeEvent();
}
void HTMLMediaElement::willRemoveAudioTrack(AudioTrack& track)
{
removeAudioTrack(track);
}
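// Called when a text track's mode (disabled/hidden/showing) changes. Cues for
// <track>-backed tracks are only added to the cue tree once the track element
// has loaded (or is still loading); the track is then marked as configured so
// configureTextTracks() won't override the mode chosen here.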
void HTMLMediaElement::textTrackModeChanged(TextTrack& track)
{
bool trackIsLoaded = true;
if (track.trackType() == TextTrack::TrackElement) {
trackIsLoaded = false;
for (auto& trackElement : childrenOfType<HTMLTrackElement>(*this)) {
if (&trackElement.track() == &track) {
if (trackElement.readyState() == HTMLTrackElement::LOADING || trackElement.readyState() == HTMLTrackElement::LOADED)
trackIsLoaded = true;
break;
}
}
}
// If this is the first added track, create the list of text tracks.
ensureTextTracks();
// Mark this track as "configured" so configureTextTracks won't change the mode again.
track.setHasBeenConfigured(true);
if (track.mode() != TextTrack::Mode::Disabled && trackIsLoaded)
textTrackAddCues(track, *track.cues());
configureTextTrackDisplay(AssumeTextTrackVisibilityChanged);
if (m_textTracks && m_textTracks->contains(track))
m_textTracks->scheduleChangeEvent();
#if ENABLE(AVF_CAPTIONS)
if (track.trackType() == TextTrack::TrackElement && m_player)
m_player->notifyTrackModeChanged();
#endif
}
void HTMLMediaElement::textTrackKindChanged(TextTrack& track)
{
if (track.kind() != TextTrack::Kind::Captions && track.kind() != TextTrack::Kind::Subtitles && track.mode() == TextTrack::Mode::Showing)
track.setMode(TextTrack::Mode::Hidden);
if (m_textTracks && m_textTracks->contains(track))
m_textTracks->scheduleChangeEvent();
}
void HTMLMediaElement::textTrackLabelChanged(TextTrack& track)
{
if (m_textTracks && m_textTracks->contains(track))
m_textTracks->scheduleChangeEvent();
}
void HTMLMediaElement::textTrackLanguageChanged(TextTrack& track)
{
if (m_textTracks && m_textTracks->contains(track))
m_textTracks->scheduleChangeEvent();
}
void HTMLMediaElement::willRemoveTextTrack(TextTrack& track)
{
if (track.trackType() == TextTrack::InBand)
removeTextTrack(track);
}
void HTMLMediaElement::videoTrackSelectedChanged(VideoTrack& track)
{
if (m_videoTracks && m_videoTracks->contains(track))
m_videoTracks->scheduleChangeEvent();
checkForAudioAndVideo();
}
void HTMLMediaElement::videoTrackKindChanged(VideoTrack& track)
{
if (m_videoTracks && m_videoTracks->contains(track))
m_videoTracks->scheduleChangeEvent();
}
void HTMLMediaElement::videoTrackLabelChanged(VideoTrack& track)
{
if (m_videoTracks && m_videoTracks->contains(track))
m_videoTracks->scheduleChangeEvent();
}
void HTMLMediaElement::videoTrackLanguageChanged(VideoTrack& track)
{
if (m_videoTracks && m_videoTracks->contains(track))
m_videoTracks->scheduleChangeEvent();
}
void HTMLMediaElement::willRemoveVideoTrack(VideoTrack& track)
{
removeVideoTrack(track);
}
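// Cue insertions and removals often arrive in batches. The begin/end pair
// below suppresses intermediate active-cue updates; a single deferred update
// is queued once the outermost end call runs. TrackDisplayUpdateScope (used in
// textTrackAddCues() and textTrackRemoveCues()) is assumed to be the RAII
// wrapper pairing these calls, e.g.:
//
//     TrackDisplayUpdateScope scope { *this };
//     for (unsigned i = 0; i < cues.length(); ++i)
//         textTrackAddCue(track, *cues.item(i)); // one cue update at scope exit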
void HTMLMediaElement::beginIgnoringTrackDisplayUpdateRequests()
{
++m_ignoreTrackDisplayUpdate;
}
void HTMLMediaElement::endIgnoringTrackDisplayUpdateRequests()
{
ASSERT(m_ignoreTrackDisplayUpdate);
--m_ignoreTrackDisplayUpdate;
queueCancellableTaskKeepingObjectAlive(*this, TaskSource::MediaElement, m_updateTextTracksTaskCancellationGroup, [this] {
if (!m_ignoreTrackDisplayUpdate && m_inActiveDocument)
updateActiveTextTrackCues(currentMediaTime());
});
}
void HTMLMediaElement::textTrackAddCues(TextTrack& track, const TextTrackCueList& cues)
{
if (track.mode() == TextTrack::Mode::Disabled)
return;
TrackDisplayUpdateScope scope { *this };
for (unsigned i = 0; i < cues.length(); ++i)
textTrackAddCue(track, *cues.item(i));
}
void HTMLMediaElement::textTrackRemoveCues(TextTrack&, const TextTrackCueList& cues)
{
TrackDisplayUpdateScope scope { *this };
for (unsigned i = 0; i < cues.length(); ++i) {
auto& cue = *cues.item(i);
textTrackRemoveCue(*cue.track(), cue);
}
}
void HTMLMediaElement::textTrackAddCue(TextTrack& track, TextTrackCue& cue)
{
if (track.mode() == TextTrack::Mode::Disabled)
return;
if (!m_cueData)
m_cueData = makeUnique<CueData>();
// Negative duration cues need to be treated in the interval tree as
// zero-length cues.
MediaTime endTime = std::max(cue.startMediaTime(), cue.endMediaTime());
CueInterval interval(cue.startMediaTime(), endTime, &cue);
if (!m_cueData->cueTree.contains(interval))
m_cueData->cueTree.add(interval);
updateActiveTextTrackCues(currentMediaTime());
}
void HTMLMediaElement::textTrackRemoveCue(TextTrack&, TextTrackCue& cue)
{
if (!m_cueData)
m_cueData = makeUnique<CueData>();
// Negative duration cues need to be treated in the interval tree as
// zero-length cues.
MediaTime endTime = std::max(cue.startMediaTime(), cue.endMediaTime());
CueInterval interval(cue.startMediaTime(), endTime, &cue);
m_cueData->cueTree.remove(interval);
// Since the cue is being removed from the media element and the TextTrack
// might be destroyed as well, the region should not be notified when the
// cue's display tree is removed.
auto isVTT = is<VTTCue>(cue);
if (isVTT)
downcast<VTTCue>(cue).notifyRegionWhenRemovingDisplayTree(false);
size_t index = m_cueData->currentlyActiveCues.find(interval);
if (index != notFound) {
cue.setIsActive(false);
m_cueData->currentlyActiveCues.remove(index);
}
cue.removeDisplayTree();
updateActiveTextTrackCues(currentMediaTime());
if (isVTT)
downcast<VTTCue>(cue).notifyRegionWhenRemovingDisplayTree(true);
}
CueList HTMLMediaElement::currentlyActiveCues() const
{
if (!m_cueData)
return { };
return m_cueData->currentlyActiveCues;
}
static inline bool isAllowedToLoadMediaURL(HTMLMediaElement& element, const URL& url, bool isInUserAgentShadowTree)
{
// Elements in a user agent shadow tree should load regardless of the embedding document's policy.
if (isInUserAgentShadowTree)
return true;
ASSERT(element.document().contentSecurityPolicy());
return element.document().contentSecurityPolicy()->allowMediaFromSource(url);
}
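// A candidate URL must be valid, displayable by the document's security
// origin, on an allowed port, and permitted by the document's Content Security
// Policy before it is handed to the media engine.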
bool HTMLMediaElement::isSafeToLoadURL(const URL& url, InvalidURLAction actionIfInvalid)
{
if (!url.isValid()) {
ERROR_LOG(LOGIDENTIFIER, url, " is invalid");
return false;
}
RefPtr<Frame> frame = document().frame();
if (!frame || !document().securityOrigin().canDisplay(url)) {
if (actionIfInvalid == Complain) {
FrameLoader::reportLocalLoadFailed(frame.get(), url.stringCenterEllipsizedToLength());
ERROR_LOG(LOGIDENTIFIER, url , " was rejected by SecurityOrigin");
}
return false;
}
if (!portAllowed(url)) {
if (actionIfInvalid == Complain) {
if (frame)
FrameLoader::reportBlockedLoadFailed(*frame, url);
ERROR_LOG(LOGIDENTIFIER, url , " was rejected because the port is not allowed");
}
return false;
}
if (!isAllowedToLoadMediaURL(*this, url, isInUserAgentShadowTree())) {
ERROR_LOG(LOGIDENTIFIER, url, " was rejected by Content Security Policy");
return false;
}
return true;
}
void HTMLMediaElement::startProgressEventTimer()
{
if (m_progressEventTimer.isActive())
return;
m_previousProgressTime = MonotonicTime::now();
// 350ms is not magic, it is in the spec!
m_progressEventTimer.startRepeating(350_ms);
}
void HTMLMediaElement::waitForSourceChange()
{
ALWAYS_LOG(LOGIDENTIFIER);
stopPeriodicTimers();
m_loadState = WaitingForSource;
// 6.17 - Waiting: Set the element's networkState attribute to the NETWORK_NO_SOURCE value
m_networkState = NETWORK_NO_SOURCE;
// 6.18 - Set the element's show poster flag to true.
setShowPosterFlag(true);
// 6.19 - Queue a media element task given the media element to set the element's
// delaying-the-load-event flag to false. This stops delaying the load event.
// FIXME: this should be done in a task queue
setShouldDelayLoadEvent(false);
updateRenderer();
}
void HTMLMediaElement::noneSupported()
{
if (m_error)
return;
ALWAYS_LOG(LOGIDENTIFIER);
stopPeriodicTimers();
m_loadState = WaitingForSource;
m_currentSourceNode = nullptr;
// 4.8.10.5
// 6 - Reaching this step indicates that the media resource failed to load or that the given
// URL could not be resolved. In one atomic operation, run the following steps:
// 6.1 - Set the error attribute to a new MediaError object whose code attribute is set to
// MEDIA_ERR_SRC_NOT_SUPPORTED.
m_error = m_player
? MediaError::create(MediaError::MEDIA_ERR_SRC_NOT_SUPPORTED, m_player->lastErrorMessage())
: MediaError::create(MediaError::MEDIA_ERR_SRC_NOT_SUPPORTED, "Unsupported source type"_s);
// 6.2 - Forget the media element's media-resource-specific text tracks.
forgetResourceSpecificTracks();
// 6.3 - Set the element's networkState attribute to the NETWORK_NO_SOURCE value.
m_networkState = NETWORK_NO_SOURCE;
// 6.4 - Set the element's show poster flag to true.
setShowPosterFlag(true);
// 7 - Queue a task to fire a simple event named error at the media element.
scheduleEvent(eventNames().errorEvent);
rejectPendingPlayPromises(WTFMove(m_pendingPlayPromises), DOMException::create(NotSupportedError));
#if ENABLE(MEDIA_SOURCE)
detachMediaSource();
#endif
// 8 - Set the element's delaying-the-load-event flag to false. This stops delaying the load event.
setShouldDelayLoadEvent(false);
// 9 - Abort these steps. Until the load() method is invoked or the src attribute is changed,
// the element won't attempt to load another resource.
updateRenderer();
}
void HTMLMediaElement::mediaLoadingFailedFatally(MediaPlayer::NetworkState error)
{
// https://html.spec.whatwg.org/#loading-the-media-resource:dom-media-have_nothing-2
// 17 March 2021
// 1 - The user agent should cancel the fetching process.
stopPeriodicTimers();
m_loadState = WaitingForSource;
const auto getErrorMessage = [&] (String&& defaultMessage) {
String message = WTFMove(defaultMessage);
if (!m_player)
return message;
auto lastErrorMessage = m_player->lastErrorMessage();
if (!lastErrorMessage)
return message;
return makeString(message, ": ", lastErrorMessage);
};
// 2 - Set the error attribute to a new MediaError object whose code attribute is
// set to MEDIA_ERR_NETWORK/MEDIA_ERR_DECODE.
if (error == MediaPlayer::NetworkState::NetworkError)
m_error = MediaError::create(MediaError::MEDIA_ERR_NETWORK, getErrorMessage("Media failed to load"_s));
else if (error == MediaPlayer::NetworkState::DecodeError)
m_error = MediaError::create(MediaError::MEDIA_ERR_DECODE, getErrorMessage("Media failed to decode"_s));
else
ASSERT_NOT_REACHED();
#if ENABLE(MEDIA_SOURCE)
detachMediaSource();
#endif
// 3 - Set the element's networkState attribute to the NETWORK_IDLE value.
m_networkState = NETWORK_IDLE;
// 4 - Set the element's delaying-the-load-event flag to false. This stops delaying the load event.
setShouldDelayLoadEvent(false);
// 5 - Fire an event named error at the media element.
scheduleEvent(eventNames().errorEvent);
// 6 - Abort the overall resource selection algorithm.
m_currentSourceNode = nullptr;
}
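// Cancels queued asynchronous media events, drops pending error events on
// child <source> elements, and rejects all outstanding play() promises with
// an AbortError.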
void HTMLMediaElement::cancelPendingEventsAndCallbacks()
{
INFO_LOG(LOGIDENTIFIER);
m_asyncEventsCancellationGroup.cancel();
for (auto& source : childrenOfType<HTMLSourceElement>(*this))
source.cancelPendingErrorEvent();
rejectPendingPlayPromises(WTFMove(m_pendingPlayPromises), DOMException::create(AbortError));
}
void HTMLMediaElement::mediaPlayerNetworkStateChanged()
{
beginProcessingMediaPlayerCallback();
setNetworkState(m_player->networkState());
endProcessingMediaPlayerCallback();
}
static void logMediaLoadRequest(Page* page, const String& mediaEngine, const String& errorMessage, bool succeeded)
{
if (!page)
return;
DiagnosticLoggingClient& diagnosticLoggingClient = page->diagnosticLoggingClient();
if (!succeeded) {
diagnosticLoggingClient.logDiagnosticMessageWithResult(DiagnosticLoggingKeys::mediaLoadingFailedKey(), errorMessage, DiagnosticLoggingResultFail, ShouldSample::No);
return;
}
diagnosticLoggingClient.logDiagnosticMessage(DiagnosticLoggingKeys::mediaLoadedKey(), mediaEngine, ShouldSample::No);
if (!page->hasSeenAnyMediaEngine())
diagnosticLoggingClient.logDiagnosticMessage(DiagnosticLoggingKeys::pageContainsAtLeastOneMediaEngineKey(), emptyString(), ShouldSample::No);
if (!page->hasSeenMediaEngine(mediaEngine))
diagnosticLoggingClient.logDiagnosticMessage(DiagnosticLoggingKeys::pageContainsMediaEngineKey(), mediaEngine, ShouldSample::No);
page->sawMediaEngine(mediaEngine);
}
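// Handles a load failure reported by the media engine. While the resource
// selection algorithm is still iterating <source> children, this fires an
// error event at the failed candidate and schedules the next one (or waits for
// a new source). Otherwise it escalates to mediaLoadingFailedFatally() or
// noneSupported() depending on the error and the load state.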
void HTMLMediaElement::mediaLoadingFailed(MediaPlayer::NetworkState error)
{
stopPeriodicTimers();
// If we failed while trying to load a <source> element, the movie was never parsed; if there are more
// <source> children, schedule the next one.
if (m_readyState < HAVE_METADATA && m_loadState == LoadingFromSourceElement) {
// resource selection algorithm
// Step 9.Otherwise.9 - Failed with elements: Queue a task, using the DOM manipulation task source, to fire a simple event named error at the candidate element.
if (m_currentSourceNode)
m_currentSourceNode->scheduleErrorEvent();
else
ALWAYS_LOG(LOGIDENTIFIER, "error event not sent, <source> was removed");
// 9.Otherwise.10 - Asynchronously await a stable state. The synchronous section consists of all the remaining steps of this algorithm until the algorithm says the synchronous section has ended.
// 9.Otherwise.11 - Forget the media element's media-resource-specific tracks.
forgetResourceSpecificTracks();
if (havePotentialSourceChild()) {
ALWAYS_LOG(LOGIDENTIFIER, "scheduling next <source>");
scheduleNextSourceChild();
} else {
ALWAYS_LOG(LOGIDENTIFIER, "no more <source> elements, waiting");
waitForSourceChange();
}
return;
}
if ((error == MediaPlayer::NetworkState::NetworkError && m_readyState >= HAVE_METADATA) || error == MediaPlayer::NetworkState::DecodeError)
mediaLoadingFailedFatally(error);
else if ((error == MediaPlayer::NetworkState::FormatError || error == MediaPlayer::NetworkState::NetworkError) && m_loadState == LoadingFromSrcAttr)
noneSupported();
ERROR_LOG(LOGIDENTIFIER, "error = ", static_cast<int>(error));
logMediaLoadRequest(document().page(), String(), convertEnumerationToString(error), false);
mediaSession().clientCharacteristicsChanged(false);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (!m_hasPlaybackTargetAvailabilityListeners)
mediaSession().setActive(false);
#else
mediaSession().setActive(false);
#endif
}
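// Maps the MediaPlayer's network state onto the element's networkState
// attribute, starting or stopping the progress event timer as needed and
// routing the error states through mediaLoadingFailed().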
void HTMLMediaElement::setNetworkState(MediaPlayer::NetworkState state)
{
if (static_cast<int>(state) != static_cast<int>(m_networkState))
ALWAYS_LOG(LOGIDENTIFIER, "new state = ", state, ", current state = ", m_networkState);
if (state == MediaPlayer::NetworkState::Empty) {
// Just update the cached state and leave, we can't do anything.
m_networkState = NETWORK_EMPTY;
invalidateStyle();
return;
}
if (state == MediaPlayer::NetworkState::FormatError || state == MediaPlayer::NetworkState::NetworkError || state == MediaPlayer::NetworkState::DecodeError) {
mediaLoadingFailed(state);
return;
}
if (state == MediaPlayer::NetworkState::Idle) {
if (m_networkState > NETWORK_IDLE) {
changeNetworkStateFromLoadingToIdle();
setShouldDelayLoadEvent(false);
} else {
m_networkState = NETWORK_IDLE;
}
}
if (state == MediaPlayer::NetworkState::Loading) {
if (m_networkState < NETWORK_LOADING || m_networkState == NETWORK_NO_SOURCE)
startProgressEventTimer();
m_networkState = NETWORK_LOADING;
}
if (state == MediaPlayer::NetworkState::Loaded) {
if (m_networkState != NETWORK_IDLE)
changeNetworkStateFromLoadingToIdle();
m_completelyLoaded = true;
}
invalidateStyle();
}
void HTMLMediaElement::changeNetworkStateFromLoadingToIdle()
{
m_progressEventTimer.stop();
// Schedule one last progress event so we guarantee that at least one is fired
// for files that load very quickly.
scheduleEvent(eventNames().progressEvent);
scheduleEvent(eventNames().suspendEvent);
m_networkState = NETWORK_IDLE;
}
void HTMLMediaElement::mediaPlayerReadyStateChanged()
{
if (isSuspended()) {
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this] {
mediaPlayerReadyStateChanged();
});
return;
}
beginProcessingMediaPlayerCallback();
setReadyState(m_player->readyState());
endProcessingMediaPlayerCallback();
}
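// Returns whether the element may transition from autoplaying to actually
// playing. Note that every early return below reports PageConsentRequired;
// more specific denial reasons (e.g. UserGestureRequired) come from
// mediaSession().playbackStateChangePermitted().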
Expected<void, MediaPlaybackDenialReason> HTMLMediaElement::canTransitionFromAutoplayToPlay() const
{
if (m_readyState != HAVE_ENOUGH_DATA) {
ALWAYS_LOG(LOGIDENTIFIER, "m_readyState != HAVE_ENOUGH_DATA");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (!isAutoplaying()) {
ALWAYS_LOG(LOGIDENTIFIER, "!isAutoplaying");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (!mediaSession().autoplayPermitted()) {
ALWAYS_LOG(LOGIDENTIFIER, "!mediaSession().autoplayPermitted");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (!paused()) {
ALWAYS_LOG(LOGIDENTIFIER, "!paused");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (!autoplay()) {
ALWAYS_LOG(LOGIDENTIFIER, "!autoplay");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (pausedForUserInteraction()) {
ALWAYS_LOG(LOGIDENTIFIER, "pausedForUserInteraction");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
if (document().isSandboxed(SandboxAutomaticFeatures)) {
ALWAYS_LOG(LOGIDENTIFIER, "isSandboxed");
return makeUnexpected(MediaPlaybackDenialReason::PageConsentRequired);
}
auto permitted = mediaSession().playbackStateChangePermitted(MediaPlaybackState::Playing);
#if !RELEASE_LOG_DISABLED
if (!permitted)
ALWAYS_LOG(LOGIDENTIFIER, permitted.error());
else
ALWAYS_LOG(LOGIDENTIFIER, "can transition!");
#endif
return permitted;
}
void HTMLMediaElement::dispatchPlayPauseEventsIfNeedsQuirks()
{
if (!document().quirks().needsAutoplayPlayPauseEvents())
return;
ALWAYS_LOG(LOGIDENTIFIER);
scheduleEvent(eventNames().playingEvent);
scheduleEvent(eventNames().pauseEvent);
}
void HTMLMediaElement::durationChanged()
{
if (m_textTracks)
m_textTracks->setDuration(durationMediaTime());
scheduleEvent(eventNames().durationchangeEvent);
}
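// Drives the readyState machine: fires loadedmetadata, loadeddata, canplay,
// canplaythrough, and playing as thresholds are crossed, finishes pending
// seeks, and decides whether autoplay may begin. When text tracks are not yet
// ready, the state is capped at HAVE_CURRENT_DATA (see below).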
void HTMLMediaElement::setReadyState(MediaPlayer::ReadyState state)
{
// Set "wasPotentiallyPlaying" BEFORE updating m_readyState, potentiallyPlaying() uses it
bool wasPotentiallyPlaying = potentiallyPlaying();
ReadyState oldState = m_readyState;
ReadyState newState = static_cast<ReadyState>(state);
bool tracksAreReady = textTracksAreReady();
if (newState == oldState && m_tracksAreReady == tracksAreReady)
return;
m_tracksAreReady = tracksAreReady;
ALWAYS_LOG(LOGIDENTIFIER, "new state = ", state, ", current state = ", m_readyState);
if (tracksAreReady)
m_readyState = newState;
else {
// If a media file has text tracks, the readyState may not progress beyond HAVE_CURRENT_DATA until
// the text tracks are ready, regardless of the state of the media file.
if (newState <= HAVE_METADATA)
m_readyState = newState;
else
m_readyState = HAVE_CURRENT_DATA;
}
if (oldState > m_readyStateMaximum)
m_readyStateMaximum = oldState;
if (m_networkState == NETWORK_EMPTY)
return;
if (m_seeking) {
// 4.8.10.9, step 11
if (wasPotentiallyPlaying && m_readyState < HAVE_FUTURE_DATA)
scheduleEvent(eventNames().waitingEvent);
// 4.8.10.10 step 14 & 15.
if (m_seekRequested && !m_player->seeking() && m_readyState >= HAVE_CURRENT_DATA)
finishSeek();
} else {
if (wasPotentiallyPlaying && m_readyState < HAVE_FUTURE_DATA) {
// 4.8.10.8
invalidateCachedTime();
scheduleTimeupdateEvent(false);
scheduleEvent(eventNames().waitingEvent);
}
}
// Apply the first applicable set of substeps from the following list:
do {
// FIXME: The specification seems to only say HAVE_METADATA
// explicitly (rather than or higher) for this state. It's unclear
// if/how things like loadedmetadataEvent should happen if
// we go directly from below HAVE_METADATA to higher than
// HAVE_METADATA.
if (m_readyState >= HAVE_METADATA && oldState < HAVE_METADATA) {
prepareMediaFragmentURI();
durationChanged();
scheduleResizeEvent();
scheduleEvent(eventNames().loadedmetadataEvent);
if (m_defaultPlaybackStartPosition > MediaTime::zeroTime()) {
// We reset it beforehand so that currentMediaTime() returns the actual current time (not
// defaultPlaybackPosition) and the seek code doesn't think the seek was already done.
MediaTime seekTarget = m_defaultPlaybackStartPosition;
m_defaultPlaybackStartPosition = MediaTime::zeroTime();
seekInternal(seekTarget);
}
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
if (hasEventListeners(eventNames().webkitplaybacktargetavailabilitychangedEvent))
enqueuePlaybackTargetAvailabilityChangedEvent();
#endif
m_initiallyMuted = m_volume < 0.05 || muted();
updateRenderer();
if (is<MediaDocument>(document()))
downcast<MediaDocument>(document()).mediaElementNaturalSizeChanged(expandedIntSize(m_player->naturalSize()));
logMediaLoadRequest(document().page(), m_player->engineDescription(), String(), true);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
scheduleUpdateMediaState();
#endif
mediaSession().clientCharacteristicsChanged(false);
// As the spec only mentions HAVE_METADATA, run the later
// steps if we are moving to a higher state.
if (m_readyState == HAVE_METADATA)
break;
}
if (m_readyState >= HAVE_CURRENT_DATA && oldState < HAVE_CURRENT_DATA) {
if (!m_haveFiredLoadedData) {
m_haveFiredLoadedData = true;
scheduleEvent(eventNames().loadeddataEvent);
// FIXME: It's not clear that it's correct to skip this operation just
// because m_haveFiredLoadedData is already true. At one time we were skipping
// the call to setShouldDelayLoadEvent, which was definitely incorrect.
applyMediaFragmentURI();
}
setShouldDelayLoadEvent(false);
// If the new ready state is HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA, then the relevant steps below must then be run also.
if (m_readyState < HAVE_FUTURE_DATA)
break;
}
if (!tracksAreReady)
break;
if (m_readyState == HAVE_FUTURE_DATA && oldState <= HAVE_CURRENT_DATA) {
scheduleEvent(eventNames().canplayEvent);
// If the element’s paused attribute is false, the user agent must queue a task to fire a simple event named playing at the element.
if (!paused())
scheduleNotifyAboutPlaying();
break;
}
if (m_readyState == HAVE_ENOUGH_DATA && oldState < HAVE_ENOUGH_DATA) {
// If the previous ready state was HAVE_CURRENT_DATA or less,
// the user agent must queue a media element task given the media element to fire an event named canplay at the element,
// and, if the element's paused attribute is false, notify about playing for the element.
if (oldState <= HAVE_CURRENT_DATA) {
scheduleEvent(eventNames().canplayEvent);
if (!paused())
scheduleNotifyAboutPlaying();
}
// The user agent must queue a media element task given the media element to fire an event named canplaythrough at the element.
scheduleEvent(eventNames().canplaythroughEvent);
// If the element is not eligible for autoplay, then the user agent must abort these substeps.
// The user agent may run the following substeps:
// Set the paused attribute to false.
// If the element's show poster flag is true, set it to false and run the time marches on steps.
// Queue a media element task given the element to fire an event named play at the element.
// Notify about playing for the element.
auto canTransition = canTransitionFromAutoplayToPlay();
if (canTransition) {
setPaused(false);
setShowPosterFlag(false);
invalidateCachedTime();
setAutoplayEventPlaybackState(AutoplayEventPlaybackState::StartedWithoutUserGesture);
m_playbackStartedTime = currentMediaTime().toDouble();
scheduleEvent(eventNames().playEvent);
scheduleNotifyAboutPlaying();
} else if (canTransition.error() == MediaPlaybackDenialReason::UserGestureRequired) {
ALWAYS_LOG(LOGIDENTIFIER, "Autoplay blocked, user gesture required");
setAutoplayEventPlaybackState(AutoplayEventPlaybackState::PreventedAutoplay);
}
}
} while (false);
// If we transition to the Future Data state and we're about to begin playing, ensure playback is actually permitted first,
// honoring any playback denial reasons such as the requirement of a user gesture.
if (m_readyState == HAVE_FUTURE_DATA && oldState < HAVE_FUTURE_DATA && potentiallyPlaying() && !mediaSession().playbackStateChangePermitted(MediaPlaybackState::Playing)) {
auto canTransition = canTransitionFromAutoplayToPlay();
if (!canTransition && canTransition.error() == MediaPlaybackDenialReason::UserGestureRequired)
ALWAYS_LOG(LOGIDENTIFIER, "Autoplay blocked, user gesture required");
pauseInternal();
setAutoplayEventPlaybackState(AutoplayEventPlaybackState::PreventedAutoplay);
}
updatePlayState();
updateMediaController();
updateActiveTextTrackCues(currentMediaTime());
invalidateStyle();
}
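// Legacy, WebKit-prefixed encrypted media support (webkitneedkey events and
// WebKitMediaKeys).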
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
#if ENABLE(ENCRYPTED_MEDIA)
void HTMLMediaElement::updateShouldContinueAfterNeedKey()
{
if (!m_player)
return;
bool shouldContinue = hasEventListeners(eventNames().webkitneedkeyEvent) || (document().settings().encryptedMediaAPIEnabled() && !document().quirks().hasBrokenEncryptedMediaAPISupportQuirk());
m_player->setShouldContinueAfterKeyNeeded(shouldContinue);
}
#endif
RefPtr<ArrayBuffer> HTMLMediaElement::mediaPlayerCachedKeyForKeyId(const String& keyId) const
{
return m_webKitMediaKeys ? m_webKitMediaKeys->cachedKeyForKeyId(keyId) : nullptr;
}
void HTMLMediaElement::mediaPlayerKeyNeeded(const SharedBuffer& initData)
{
if (!document().settings().legacyEncryptedMediaAPIEnabled())
return;
if (!hasEventListeners(eventNames().webkitneedkeyEvent)
#if ENABLE(ENCRYPTED_MEDIA)
// Only fire an error if ENCRYPTED_MEDIA is not enabled, to give clients of the
// "encrypted" event a chance to handle it without resulting in a synthetic error.
&& (!document().settings().encryptedMediaAPIEnabled() || document().quirks().hasBrokenEncryptedMediaAPISupportQuirk())
#endif
) {
m_error = MediaError::create(MediaError::MEDIA_ERR_ENCRYPTED, "Media is encrypted"_s);
scheduleEvent(eventNames().errorEvent);
return;
}
WebKitMediaKeyNeededEvent::Init init;
if (auto initDataBuffer = initData.tryCreateArrayBuffer()) {
auto byteLength = initDataBuffer->byteLength();
init.initData = Uint8Array::tryCreate(initDataBuffer.releaseNonNull(), 0, byteLength);
}
auto event = WebKitMediaKeyNeededEvent::create(eventNames().webkitneedkeyEvent, init);
scheduleEvent(WTFMove(event));
}
String HTMLMediaElement::mediaPlayerMediaKeysStorageDirectory() const
{
auto* page = document().page();
if (!page || page->usesEphemeralSession())
return emptyString();
String storageDirectory = document().settings().mediaKeysStorageDirectory();
if (storageDirectory.isEmpty())
return emptyString();
return FileSystem::pathByAppendingComponent(storageDirectory, document().securityOrigin().data().databaseIdentifier());
}
void HTMLMediaElement::webkitSetMediaKeys(WebKitMediaKeys* mediaKeys)
{
if (!document().settings().legacyEncryptedMediaAPIEnabled())
return;
if (m_webKitMediaKeys == mediaKeys)
return;
if (m_webKitMediaKeys)
m_webKitMediaKeys->setMediaElement(nullptr);
m_webKitMediaKeys = mediaKeys;
if (m_webKitMediaKeys)
m_webKitMediaKeys->setMediaElement(this);
}
void HTMLMediaElement::keyAdded()
{
if (!document().settings().legacyEncryptedMediaAPIEnabled())
return;
if (m_player)
m_player->keyAdded();
}
#endif
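// Encrypted Media Extensions support (MediaKeys, "encrypted" and
// "waitingforkey" events). The spec steps being implemented are cited inline
// below.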
#if ENABLE(ENCRYPTED_MEDIA)
MediaKeys* HTMLMediaElement::mediaKeys() const
{
return m_mediaKeys.get();
}
void HTMLMediaElement::setMediaKeys(MediaKeys* mediaKeys, Ref<DeferredPromise>&& promise)
{
// https://w3c.github.io/encrypted-media/#dom-htmlmediaelement-setmediakeys
// W3C Editor's Draft 23 June 2017
// 1. If this object's attaching media keys value is true, return a promise rejected with an InvalidStateError.
if (m_attachingMediaKeys) {
promise->reject(InvalidStateError);
return;
}
// 2. If mediaKeys and the mediaKeys attribute are the same object, return a resolved promise.
if (mediaKeys == m_mediaKeys) {
promise->resolve();
return;
}
// 3. Let this object's attaching media keys value be true.
m_attachingMediaKeys = true;
// 4. Let promise be a new promise.
// 5. Run the following steps in parallel:
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this, mediaKeys = RefPtr<MediaKeys>(mediaKeys), promise = WTFMove(promise)]() mutable {
if (isContextStopped())
return;
// 5.1. If all the following conditions hold:
// - mediaKeys is not null,
// - the CDM instance represented by mediaKeys is already in use by another media element
// - the user agent is unable to use it with this element
// then let this object's attaching media keys value be false and reject promise with a QuotaExceededError.
// FIXME: ^
// 5.2. If the mediaKeys attribute is not null, run the following steps:
if (m_mediaKeys) {
// 5.2.1. If the user agent or CDM do not support removing the association, let this object's attaching media keys value be false and reject promise with a NotSupportedError.
// 5.2.2. If the association cannot currently be removed, let this object's attaching media keys value be false and reject promise with an InvalidStateError.
// 5.2.3. Stop using the CDM instance represented by the mediaKeys attribute to decrypt media data and remove the association with the media element.
// 5.2.4. If the preceding step failed, let this object's attaching media keys value be false and reject promise with the appropriate error name.
// FIXME: ^
m_mediaKeys->detachCDMClient(*this);
if (m_player)
m_player->cdmInstanceDetached(m_mediaKeys->cdmInstance());
}
// 5.3. If mediaKeys is not null, run the following steps:
if (mediaKeys) {
// 5.3.1. Associate the CDM instance represented by mediaKeys with the media element for decrypting media data.
mediaKeys->attachCDMClient(*this);
if (m_player)
m_player->cdmInstanceAttached(mediaKeys->cdmInstance());
// 5.3.2. If the preceding step failed, run the following steps:
// 5.3.2.1. Set the mediaKeys attribute to null.
// 5.3.2.2. Let this object's attaching media keys value be false.
// 5.3.2.3. Reject promise with a new DOMException whose name is the appropriate error name.
// FIXME: ^
// 5.3.3. Queue a task to run the Attempt to Resume Playback If Necessary algorithm on the media element.
queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this] {
if (!isContextStopped())
attemptToResumePlaybackIfNecessary();
});
}
// 5.4. Set the mediaKeys attribute to mediaKeys.
// 5.5. Let this object's attaching media keys value be false.
// 5.6. Resolve promise.
m_mediaKeys = WTFMove(mediaKeys);
m_attachingMediaKeys = false;
promise->resolve();
});
// 6. Return promise.
}
void HTMLMediaElement::mediaPlayerInitializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
if (!document().settings().encryptedMediaAPIEnabled() || document().quirks().hasBrokenEncryptedMediaAPISupportQuirk())
return;
// https://w3c.github.io/encrypted-media/#initdata-encountered
// W3C Editor's Draft 23 June 2017
// 1. Let the media element be the specified HTMLMediaElement object.
// 2. Let initDataType be the empty string.
// 3. Let initData be null.
// 4. If the media data is CORS-same-origin and not mixed content, run the following steps:
// 4.1. Let initDataType be the string representing the Initialization Data Type of the Initialization Data.
// 4.2. Let initData be the Initialization Data.
// FIXME: ^
// 5. Queue a task to create an event named encrypted that does not bubble and is not cancellable using the
// MediaEncryptedEvent interface with its type attribute set to encrypted and its isTrusted attribute
// initialized to true, and dispatch it at the media element.
// The event interface MediaEncryptedEvent has:
// initDataType = initDataType
// initData = initData
MediaEncryptedEventInit initializer { initDataType, WTFMove(initData) };
scheduleEvent(MediaEncryptedEvent::create(eventNames().encryptedEvent, initializer, Event::IsTrusted::Yes));
}
void HTMLMediaElement::mediaPlayerWaitingForKeyChanged()
{
if (!m_player)
return;
if (!m_player->waitingForKey() && m_playbackBlockedWaitingForKey) {
// https://w3c.github.io/encrypted-media/#resume-playback
// W3C Editor's Draft 23 June 2017
// NOTE: continued from HTMLMediaElement::attemptToDecrypt().
// 4. If the user agent can advance the current playback position in the direction of playback:
// 4.1. Set the media element's decryption blocked waiting for key value to false.
// FIXME: ^
// 4.2. Set the media element's playback blocked waiting for key value to false.
m_playbackBlockedWaitingForKey = false;
// 4.3. Set the media element's readyState value to HAVE_CURRENT_DATA, HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA as appropriate.
setReadyState(m_player->readyState());
return;
}
// https://www.w3.org/TR/encrypted-media/#wait-for-key
// W3C Recommendation 18 September 2017
// The Wait for Key algorithm queues a waitingforkey event and
// updates readyState. It should only be called when the
// HTMLMediaElement object is potentially playing and its
// readyState is equal to HAVE_FUTURE_DATA or greater. Requests to
// run this algorithm include a target HTMLMediaElement object.
// The following steps are run:
// 1. Let the media element be the specified HTMLMediaElement
// object.
// 2. If the media element's playback blocked waiting for key
// value is true, abort these steps.
if (m_playbackBlockedWaitingForKey)
return;
// 3. Set the media element's playback blocked waiting for key
// value to true.
m_playbackBlockedWaitingForKey = true;
// NOTE
// As a result of the above step, the media element will become a
// blocked media element if it wasn't already. In that case, the
// media element will stop playback.
// 4. Follow the steps for the first matching condition from the
// following list:
// If data for the immediate current playback position is
// available
// Set the readyState of media element to HAVE_CURRENT_DATA.
// Otherwise
// Set the readyState of media element to HAVE_METADATA.
ReadyState nextReadyState = buffered()->contain(currentTime()) ? HAVE_CURRENT_DATA : HAVE_METADATA;
if (nextReadyState < m_readyState)
setReadyState(static_cast<MediaPlayer::ReadyState>(nextReadyState));
// NOTE
// In other words, if the video frame and audio data for the
// current playback position have been decoded because they were
// unencrypted and/or successfully decrypted, set readyState to
// HAVE_CURRENT_DATA. Otherwise, including if this was previously
// the case but the data is no longer available, set readyState to
// HAVE_METADATA.
// 5. Queue a task to fire a simple event named waitingforkey at the
// media element.
scheduleEvent(eventNames().waitingforkeyEvent);
// 6. Suspend playback.
// GStreamer handles this without suspending explicitly.
}
void HTMLMediaElement::attemptToDecrypt()
{
// https://w3c.github.io/encrypted-media/#attempt-to-decrypt
// W3C Editor's Draft 23 June 2017
// 1. Let the media element be the specified HTMLMediaElement object.
// 2. If the media element's encrypted block queue is empty, abort these steps.
// FIXME: ^
// 3. If the media element's mediaKeys attribute is not null, run the following steps:
if (m_mediaKeys) {
// 3.1. Let media keys be the MediaKeys object referenced by that attribute.
// 3.2. Let cdm be the CDM instance represented by media keys's cdm instance value.
auto& cdmInstance = m_mediaKeys->cdmInstance();
// 3.3. If cdm is no longer usable for any reason, run the following steps:
// 3.3.1. Run the media data is corrupted steps of the resource fetch algorithm.
// 3.3.2. Run the CDM Unavailable algorithm on media keys.
// 3.3.3. Abort these steps.
// FIXME: ^
// 3.4. If there is at least one MediaKeySession created by the media keys that is not closed, run the following steps:
if (m_mediaKeys->hasOpenSessions()) {
// Continued in MediaPlayer::attemptToDecryptWithInstance().
if (m_player)
m_player->attemptToDecryptWithInstance(cdmInstance);
}
}
// 4. Set the media element's decryption blocked waiting for key value to true.
// FIXME: ^
}
void HTMLMediaElement::attemptToResumePlaybackIfNecessary()
{
// https://w3c.github.io/encrypted-media/#resume-playback
// W3C Editor's Draft 23 June 2017
// 1. Let the media element be the specified HTMLMediaElement object.
// 2. If the media element's playback blocked waiting for key is false, abort these steps.
if (!m_playbackBlockedWaitingForKey)
return;
// 3. Run the Attempt to Decrypt algorithm on the media element.
attemptToDecrypt();
// NOTE: continued in HTMLMediaElement::waitingForKeyChanged()
}
void HTMLMediaElement::cdmClientAttemptToResumePlaybackIfNecessary()
{