More dynamicDowncast<> adoption in platform code
https://bugs.webkit.org/show_bug.cgi?id=270069

Reviewed by Chris Dumez.

For security & performance.

Part 2 of going through all downcasts in platform code and tidying them
up as needed.
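
The general shape of each change is to fold a separate is<>() check plus
downcast<>() into a single dynamicDowncast<>(), which performs the type check
once and returns a pointer that is null on a mismatch. A simplified sketch of
the pattern (illustrative only, not taken from any one of the files below):

    // Before: the type is checked by is<>() and then again inside downcast<>().
    if (is<HTMLVideoElement>(element)) {
        auto& video = downcast<HTMLVideoElement>(element);
        width = video.videoWidth();
    }

    // After: a single check; dynamicDowncast<>() returns nullptr on mismatch.
    if (auto* video = dynamicDowncast<HTMLVideoElement>(element))
        width = video->videoWidth();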

* Source/WebCore/platform/audio/cocoa/AudioSampleDataSource.mm:
(WebCore::AudioSampleDataSource::pushSamples):
* Source/WebCore/platform/cocoa/DragImageCocoa.mm:
(WebCore::createDragImageFromImage):
* Source/WebCore/platform/cocoa/PlaybackSessionModelMediaElement.mm:
(WebCore::PlaybackSessionModelMediaElement::toggleFullscreen):
(WebCore::PlaybackSessionModelMediaElement::togglePictureInPicture):
(WebCore::PlaybackSessionModelMediaElement::toggleInWindowFullscreen):
* Source/WebCore/platform/graphics/AudioTrackPrivate.h:
(WebCore::AudioTrackPrivate::setEnabled):
(WebCore::AudioTrackPrivate::setConfiguration):
* Source/WebCore/platform/graphics/VideoTrackPrivate.h:
(WebCore::VideoTrackPrivate::setSelected):
(WebCore::VideoTrackPrivate::setConfiguration):
* Source/WebCore/platform/graphics/avfoundation/objc/InbandChapterTrackPrivateAVFObjC.mm:
(WebCore::InbandChapterTrackPrivateAVFObjC::processChapters):
* Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
(WebCore::MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached):
* Source/WebCore/platform/graphics/ca/cocoa/PlatformCAAnimationCocoa.mm:
(WebCore::toCAMediaTimingFunction):
(WebCore::PlatformCAAnimationCocoa::setAnimations):
* Source/WebCore/platform/graphics/ca/cocoa/PlatformCAFiltersCocoa.mm:
(WebCore::PlatformCAFilters::filterValueForOperation):
* Source/WebCore/platform/graphics/cocoa/CMUtilities.mm:
(WebCore::createFormatDescriptionFromTrackInfo):
* Source/WebCore/platform/graphics/cocoa/SourceBufferParserWebM.cpp:
(WebCore::WebMParser::flushPendingVideoSamples):
* Source/WebCore/platform/graphics/cpu/arm/filters/FELightingNeonParallelApplier.cpp:
(WebCore::FELightingNeonParallelApplier::applyPlatformParallel const):
* Source/WebCore/platform/graphics/displaylists/DisplayListRecorder.cpp:
(WebCore::DisplayList::Recorder::drawSystemImage):

Canonical link: https://commits.webkit.org/275322@main
annevk committed Feb 26, 2024
1 parent 3ba5746 commit 90d13e7
Showing 13 changed files with 50 additions and 61 deletions.
Source/WebCore/platform/audio/cocoa/AudioSampleDataSource.mm
@@ -187,7 +187,6 @@

 void AudioSampleDataSource::pushSamples(const MediaTime& sampleTime, const PlatformAudioData& audioData, size_t sampleCount)
 {
-    ASSERT(is<WebAudioBufferList>(audioData));
     pushSamplesInternal(*downcast<WebAudioBufferList>(audioData).list(), sampleTime, sampleCount);
 }

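A few of the hunks, like the one above and the track-private headers, only drop
a bare ASSERT(is<...>()) in front of a downcast<>(). That assertion is redundant
on the assumption that downcast<>() already asserts the type in debug builds;
roughly (a hedged sketch of the WTF helper in wtf/TypeCasts.h, not its exact
code):

    // Sketch only; the real helper also constrains Target/Source at compile time.
    template<typename Target, typename Source>
    Target& downcast(Source& source)
    {
        ASSERT_WITH_SECURITY_IMPLICATION(is<Target>(source));
        return static_cast<Target&>(source);
    }
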
6 changes: 2 additions & 4 deletions Source/WebCore/platform/cocoa/DragImageCocoa.mm
@@ -94,11 +94,9 @@ void deleteDragImage(RetainPtr<NSImage>)

 RetainPtr<NSImage> createDragImageFromImage(Image* image, ImageOrientation orientation)
 {
-    if (is<BitmapImage>(*image)) {
-        BitmapImage& bitmapImage = downcast<BitmapImage>(*image);
-
+    if (auto* bitmapImage = dynamicDowncast<BitmapImage>(*image)) {
         if (orientation == ImageOrientation::Orientation::FromImage)
-            orientation = bitmapImage.orientationForCurrentFrame();
+            orientation = bitmapImage->orientationForCurrentFrame();

         if (orientation != ImageOrientation::Orientation::None) {
             // Construct a correctly-rotated copy of the image to use as the drag image.

38 changes: 19 additions & 19 deletions Source/WebCore/platform/cocoa/PlaybackSessionModelMediaElement.mm
@@ -342,47 +342,47 @@
 void PlaybackSessionModelMediaElement::toggleFullscreen()
 {
 #if ENABLE(VIDEO_PRESENTATION_MODE)
-    ASSERT(is<HTMLVideoElement>(*m_mediaElement));
-    if (!is<HTMLVideoElement>(*m_mediaElement))
+    auto* element = dynamicDowncast<HTMLVideoElement>(*m_mediaElement);
+    ASSERT(element);
+    if (!element)
         return;

-    auto& element = downcast<HTMLVideoElement>(*m_mediaElement);
-    if (element.fullscreenMode() == MediaPlayerEnums::VideoFullscreenModeStandard)
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
+    if (element->fullscreenMode() == MediaPlayerEnums::VideoFullscreenModeStandard)
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
     else
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::Fullscreen);
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::Fullscreen);
 #endif
 }

 void PlaybackSessionModelMediaElement::togglePictureInPicture()
 {
 #if ENABLE(VIDEO_PRESENTATION_MODE)
-    ASSERT(is<HTMLVideoElement>(*m_mediaElement));
-    if (!is<HTMLVideoElement>(*m_mediaElement))
+    auto* element = dynamicDowncast<HTMLVideoElement>(*m_mediaElement);
+    ASSERT(element);
+    if (!element)
         return;

-    auto& element = downcast<HTMLVideoElement>(*m_mediaElement);
-    if (element.fullscreenMode() == MediaPlayerEnums::VideoFullscreenModePictureInPicture)
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
+    if (element->fullscreenMode() == MediaPlayerEnums::VideoFullscreenModePictureInPicture)
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
     else
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::PictureInPicture);
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::PictureInPicture);
 #endif
 }

 void PlaybackSessionModelMediaElement::toggleInWindowFullscreen()
 {
 #if ENABLE(VIDEO_PRESENTATION_MODE)
-    ASSERT(is<HTMLVideoElement>(*m_mediaElement));
-    if (!is<HTMLVideoElement>(*m_mediaElement))
+    auto* element = dynamicDowncast<HTMLVideoElement>(*m_mediaElement);
+    ASSERT(element);
+    if (!element)
         return;

-    auto& element = downcast<HTMLVideoElement>(*m_mediaElement);
-    UserGestureIndicator indicator(IsProcessingUserGesture::Yes, &element.document());
+    UserGestureIndicator indicator { IsProcessingUserGesture::Yes, &element->document() };

-    if (element.fullscreenMode() == MediaPlayerEnums::VideoFullscreenModeInWindow)
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
+    if (element->fullscreenMode() == MediaPlayerEnums::VideoFullscreenModeInWindow)
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::Inline);
     else
-        element.setPresentationMode(HTMLVideoElement::VideoPresentationMode::InWindow);
+        element->setPresentationMode(HTMLVideoElement::VideoPresentationMode::InWindow);
 #endif
 }

2 changes: 0 additions & 2 deletions Source/WebCore/platform/graphics/AudioTrackPrivate.h
@@ -44,7 +44,6 @@ class AudioTrackPrivate : public TrackPrivateBase {
             return;
         m_enabled = enabled;
         notifyClients([enabled](auto& client) {
-            ASSERT(is<AudioTrackPrivateClient>(client));
             downcast<AudioTrackPrivateClient>(client).enabledChanged(enabled);
         });
         if (m_enabledChangedCallback)
@@ -68,7 +67,6 @@
             return;
         m_configuration = WTFMove(configuration);
         notifyClients([configuration = m_configuration](auto& client) {
-            ASSERT(is<AudioTrackPrivateClient>(client));
             downcast<AudioTrackPrivateClient>(client).configurationChanged(configuration);
         });
     }

2 changes: 0 additions & 2 deletions Source/WebCore/platform/graphics/VideoTrackPrivate.h
@@ -44,7 +44,6 @@ class VideoTrackPrivate : public TrackPrivateBase {
             return;
         m_selected = selected;
         notifyClients([selected](auto& client) {
-            ASSERT(is<VideoTrackPrivateClient>(client));
             downcast<VideoTrackPrivateClient>(client).selectedChanged(selected);
         });
         if (m_selectedChangedCallback)
@@ -69,7 +68,6 @@
             return;
         m_configuration = WTFMove(configuration);
         notifyClients([configuration = m_configuration](auto& client) {
-            ASSERT(is<VideoTrackPrivateClient>(client));
             downcast<VideoTrackPrivateClient>(client).configurationChanged(configuration);
         });
     }

Source/WebCore/platform/graphics/avfoundation/objc/InbandChapterTrackPrivateAVFObjC.mm
@@ -66,7 +66,6 @@
         ISOWebVTTCue cueData = ISOWebVTTCue(PAL::toMediaTime([item time]), PAL::toMediaTime([item duration]), AtomString::number(chapterNumber), [item stringValue]);
         INFO_LOG(identifier, "created cue ", cueData);
         notifyMainThreadClient([cueData = WTFMove(cueData)](TrackPrivateBaseClient& client) mutable {
-            ASSERT(is<InbandTextTrackPrivateClient>(client));
             downcast<InbandTextTrackPrivateClient>(client).parseWebVTTCueData(WTFMove(cueData));
         });
     });

Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm
@@ -2951,17 +2951,17 @@ void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC*
 void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
 {
 #if HAVE(AVCONTENTKEYSESSION)
-    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
+    auto* fpsInstance = dynamicDowncast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
+    if (!fpsInstance)
         return;

-    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
-    if (&fpsInstance == m_cdmInstance)
+    if (fpsInstance == m_cdmInstance)
         return;

     if (m_cdmInstance)
         cdmInstanceDetached(*m_cdmInstance);

-    m_cdmInstance = &fpsInstance;
+    m_cdmInstance = fpsInstance;
 #else
     UNUSED_PARAM(instance);
 #endif

Source/WebCore/platform/graphics/ca/cocoa/PlatformCAAnimationCocoa.mm
@@ -129,9 +129,9 @@ void setHasExplicitBeginTime(CAAnimation *animation, bool value)

 CAMediaTimingFunction* toCAMediaTimingFunction(const TimingFunction& timingFunction, bool reverse)
 {
-    if (is<CubicBezierTimingFunction>(timingFunction)) {
+    if (auto* cubic = dynamicDowncast<CubicBezierTimingFunction>(timingFunction)) {
         RefPtr<CubicBezierTimingFunction> reversed;
-        std::reference_wrapper<const CubicBezierTimingFunction> function = downcast<CubicBezierTimingFunction>(timingFunction);
+        std::reference_wrapper<const CubicBezierTimingFunction> function = *cubic;

         if (reverse) {
             reversed = function.get().createReversed();
@@ -581,8 +581,8 @@ void setHasExplicitBeginTime(CAAnimation *animation, bool value)
     ASSERT([static_cast<CAAnimation *>(m_animation.get()) isKindOfClass:[CAAnimationGroup class]]);

     [static_cast<CAAnimationGroup *>(m_animation.get()) setAnimations:createNSArray(value, [&] (auto& animation) -> CAAnimation * {
-        if (is<PlatformCAAnimationCocoa>(animation))
-            return downcast<PlatformCAAnimationCocoa>(animation.get())->m_animation.get();
+        if (auto* platformAnimation = dynamicDowncast<PlatformCAAnimationCocoa>(animation.get()))
+            return platformAnimation->m_animation.get();
         return nil;
     }).get()];
 }

Source/WebCore/platform/graphics/ca/cocoa/PlatformCAFiltersCocoa.mm
@@ -355,8 +355,8 @@ static unsigned keyValueCountForFilter(const FilterOperation& filterOperation)
     FilterOperation::Type type = operation->type();
     RetainPtr<id> value;

-    if (is<DefaultFilterOperation>(*operation)) {
-        type = downcast<DefaultFilterOperation>(*operation).representedType();
+    if (auto* defaultOperation = dynamicDowncast<DefaultFilterOperation>(*operation)) {
+        type = defaultOperation->representedType();
         operation = nullptr;
     }

11 changes: 5 additions & 6 deletions Source/WebCore/platform/graphics/cocoa/CMUtilities.mm
@@ -138,23 +138,22 @@ static CFStringRef convertToCMYCbCRMatrix(PlatformVideoMatrixCoefficients coeffi
 {
     ASSERT(info.isVideo() || info.isAudio());

-    if (info.isAudio()) {
-        auto& audioInfo = downcast<const AudioInfo>(info);
-        if (!audioInfo.cookieData || !audioInfo.cookieData->size())
+    if (auto* audioInfo = dynamicDowncast<AudioInfo>(info)) {
+        if (!audioInfo->cookieData || !audioInfo->cookieData->size())
             return nullptr;

-        switch (audioInfo.codecName.value) {
+        switch (audioInfo->codecName.value) {
 #if ENABLE(OPUS)
         case kAudioFormatOpus:
             if (!isOpusDecoderAvailable())
                 return nullptr;
-            return createAudioFormatDescription(audioInfo);
+            return createAudioFormatDescription(*audioInfo);
 #endif
 #if ENABLE(VORBIS)
         case kAudioFormatVorbis:
             if (!isVorbisDecoderAvailable())
                 return nullptr;
-            return createAudioFormatDescription(audioInfo);
+            return createAudioFormatDescription(*audioInfo);
 #endif
         default:
             return nullptr;

Source/WebCore/platform/graphics/cocoa/SourceBufferParserWebM.cpp
@@ -1056,8 +1056,8 @@ webm::Status WebMParser::TrackData::readFrameData(webm::Reader& reader, const we
 void WebMParser::flushPendingVideoSamples()
 {
     for (auto& track : m_tracks) {
-        if (track->trackType() == TrackInfo::TrackType::Video)
-            downcast<WebMParser::VideoTrackData>(track.get()).flushPendingSamples();
+        if (auto* videoTrack = dynamicDowncast<WebMParser::VideoTrackData>(track.get()))
+            videoTrack->flushPendingSamples();
     }
 }

Source/WebCore/platform/graphics/cpu/arm/filters/FELightingNeonParallelApplier.cpp
@@ -542,19 +542,17 @@ void FELightingNeonParallelApplier::applyPlatformParallel(const LightingData& da
     floatArguments.colorBlue = color.blue;
     floatArguments.padding4 = 0;

-    if (data.lightSource->type() == LS_POINT) {
+    if (auto* pointLightSource = dynamicDowncast<PointLightSource>(*data.lightSource)) {
         neonData.flags |= FLAG_POINT_LIGHT;
-        auto& pointLightSource = downcast<PointLightSource>(*data.lightSource);
-        floatArguments.lightX = pointLightSource.position().x();
-        floatArguments.lightY = pointLightSource.position().y();
-        floatArguments.lightZ = pointLightSource.position().z();
+        floatArguments.lightX = pointLightSource->position().x();
+        floatArguments.lightY = pointLightSource->position().y();
+        floatArguments.lightZ = pointLightSource->position().z();
         floatArguments.padding2 = 0;
-    } else if (data.lightSource->type() == LS_SPOT) {
+    } else if (auto* spotLightSource = dynamicDowncast<SpotLightSource>(*data.lightSource)) {
         neonData.flags |= FLAG_SPOT_LIGHT;
-        auto& spotLightSource = downcast<SpotLightSource>(*data.lightSource);
-        floatArguments.lightX = spotLightSource.position().x();
-        floatArguments.lightY = spotLightSource.position().y();
-        floatArguments.lightZ = spotLightSource.position().z();
+        floatArguments.lightX = spotLightSource->position().x();
+        floatArguments.lightY = spotLightSource->position().y();
+        floatArguments.lightZ = spotLightSource->position().z();
         floatArguments.padding2 = 0;

         floatArguments.directionX = paintingData.directionVector.x();
@@ -565,8 +563,8 @@ void FELightingNeonParallelApplier::applyPlatformParallel(const LightingData& da
         floatArguments.coneCutOffLimit = paintingData.coneCutOffLimit;
         floatArguments.coneFullLight = paintingData.coneFullLight;
         floatArguments.coneCutOffRange = paintingData.coneCutOffLimit - paintingData.coneFullLight;
-        neonData.coneExponent = getPowerCoefficients(spotLightSource.specularExponent());
-        if (spotLightSource.specularExponent() == 1)
+        neonData.coneExponent = getPowerCoefficients(spotLightSource->specularExponent());
+        if (spotLightSource->specularExponent() == 1)
             neonData.flags |= FLAG_CONE_EXPONENT_IS_1;
     } else {
         ASSERT(data.lightSource->type() == LS_DISTANT);

Source/WebCore/platform/graphics/displaylists/DisplayListRecorder.cpp
@@ -299,8 +299,8 @@ void Recorder::drawSystemImage(SystemImage& systemImage, const FloatRect& destin
 {
     appendStateChangeItemIfNecessary();
 #if USE(SYSTEM_PREVIEW)
-    if (is<ARKitBadgeSystemImage>(systemImage)) {
-        if (auto image = downcast<ARKitBadgeSystemImage>(systemImage).image()) {
+    if (auto* badgeSystemImage = dynamicDowncast<ARKitBadgeSystemImage>(systemImage)) {
+        if (auto image = badgeSystemImage->image()) {
             auto nativeImage = image->nativeImage();
             if (!nativeImage)
                 return;

