Skip to content

Commit

Permalink
Add support for V4L2VDA on Linux
Browse files Browse the repository at this point in the history
This patch enables hardware-assisted video decoding via the
Chromium V4L2VDA, including the changes needed when running on
Linux. To use this, the use_linux_v4l2_only flag should be set
to true.

Signed-off-by: Ryo Kodama <ryo.kodama.vz@renesas.com>

fixup! avoid building not declared formats

"FRAME", "_SLICE",  "V4L2_PIX_FMT_VP9" are not defined in mainline
 Linux headers. This patch avoids building these formats.

Signed-off-by: Ryo Kodama <ryo.kodama.vz@renesas.com>

Issue #437
  • Loading branch information
dhobsong authored and msisov committed Oct 22, 2018
1 parent 13c8dfb commit 58cca76
Show file tree
Hide file tree
Showing 8 changed files with 62 additions and 13 deletions.
24 changes: 14 additions & 10 deletions media/gpu/BUILD.gn
Expand Up @@ -16,14 +16,15 @@ buildflag_header("buildflags") {
"USE_VAAPI=$use_vaapi",
"USE_V4L2_CODEC=$use_v4l2_codec",
"USE_LIBV4L2=$use_v4lplugin",
"USE_LINUX_V4L2=$use_linux_v4l2_only",
]
}

if (is_mac) {
import("//build/config/mac/mac_sdk.gni")
}

if (is_chromeos && use_v4lplugin) {
if (use_v4lplugin) {
action("libv4l2_generate_stubs") {
extra_header = "v4l2/v4l2_stub_header.fragment"

Expand Down Expand Up @@ -214,12 +215,11 @@ component("gpu") {
}
}

if (use_v4lplugin) {
sources += get_target_outputs(":libv4l2_generate_stubs")
deps += [ ":libv4l2_generate_stubs" ]
}

if (use_v4l2_codec) {
if (use_v4lplugin) {
sources += get_target_outputs(":libv4l2_generate_stubs")
deps += [ ":libv4l2_generate_stubs" ]
}
deps += [
"//third_party/libyuv",
"//ui/ozone",
Expand All @@ -231,15 +231,19 @@ component("gpu") {
"v4l2/v4l2_device.h",
"v4l2/v4l2_image_processor.cc",
"v4l2/v4l2_image_processor.h",
"v4l2/v4l2_jpeg_decode_accelerator.cc",
"v4l2/v4l2_jpeg_decode_accelerator.h",
"v4l2/v4l2_slice_video_decode_accelerator.cc",
"v4l2/v4l2_slice_video_decode_accelerator.h",
"v4l2/v4l2_video_decode_accelerator.cc",
"v4l2/v4l2_video_decode_accelerator.h",
"v4l2/v4l2_video_encode_accelerator.cc",
"v4l2/v4l2_video_encode_accelerator.h",
]
if (!use_linux_v4l2_only) {
sources += [
"v4l2_jpeg_decode_accelerator.cc",
"v4l2_jpeg_decode_accelerator.h",
"v4l2_slice_video_decode_accelerator.cc",
"v4l2_slice_video_decode_accelerator.h",
]
}
libs = [
"EGL",
"GLESv2",
Expand Down
4 changes: 4 additions & 0 deletions media/gpu/args.gni
Expand Up @@ -10,6 +10,10 @@ declare_args() {
# platforms which have v4l2 hardware encoder / decoder.
use_v4l2_codec = false

# Indicates that only definitions available in the mainline linux kernel
# will be used.
use_linux_v4l2_only = false

# Indicates if VA-API-based hardware acceleration is to be used. This
# is typically the case on x86-based ChromeOS devices.
use_vaapi = false
Expand Down
3 changes: 2 additions & 1 deletion media/gpu/gpu_jpeg_decode_accelerator_factory.cc
Expand Up @@ -13,7 +13,8 @@
#include "media/gpu/buildflags.h"
#include "media/gpu/fake_jpeg_decode_accelerator.h"

#if BUILDFLAG(USE_V4L2_CODEC) && defined(ARCH_CPU_ARM_FAMILY)
#if BUILDFLAG(USE_V4L2_CODEC) && defined(ARCH_CPU_ARM_FAMILY) && \
!BUILDFLAG(USE_LINUX_V4L2)
#define USE_V4L2_JDA
#endif

Expand Down
8 changes: 8 additions & 0 deletions media/gpu/gpu_video_decode_accelerator_factory.cc
Expand Up @@ -24,7 +24,9 @@
#endif
#if BUILDFLAG(USE_V4L2_CODEC)
#include "media/gpu/v4l2/v4l2_device.h"
#if !BUILDFLAG(USE_LINUX_V4L2)
#include "media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h"
#endif
#include "media/gpu/v4l2/v4l2_video_decode_accelerator.h"
#include "ui/gl/gl_surface_egl.h"
#endif
Expand Down Expand Up @@ -67,10 +69,12 @@ gpu::VideoDecodeAcceleratorCapabilities GetDecoderCapabilitiesInternal(
vda_profiles = V4L2VideoDecodeAccelerator::GetSupportedProfiles();
GpuVideoAcceleratorUtil::InsertUniqueDecodeProfiles(
vda_profiles, &capabilities.supported_profiles);
#if !BUILDFLAG(USE_LINUX_V4L2)
vda_profiles = V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles();
GpuVideoAcceleratorUtil::InsertUniqueDecodeProfiles(
vda_profiles, &capabilities.supported_profiles);
#endif
#endif
#if BUILDFLAG(USE_VAAPI)
vda_profiles = VaapiVideoDecodeAccelerator::GetSupportedProfiles();
GpuVideoAcceleratorUtil::InsertUniqueDecodeProfiles(
Expand Down Expand Up @@ -162,8 +166,10 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
#endif
#if BUILDFLAG(USE_V4L2_CODEC)
&GpuVideoDecodeAcceleratorFactory::CreateV4L2VDA,
#if !BUILDFLAG(USE_LINUX_V4L2)
&GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA,
#endif
#endif
#if BUILDFLAG(USE_VAAPI)
&GpuVideoDecodeAcceleratorFactory::CreateVaapiVDA,
#endif
Expand Down Expand Up @@ -217,6 +223,7 @@ GpuVideoDecodeAcceleratorFactory::CreateV4L2VDA(
return decoder;
}

#if !BUILDFLAG(USE_LINUX_V4L2)
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
Expand All @@ -232,6 +239,7 @@ GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA(
return decoder;
}
#endif
#endif

#if BUILDFLAG(USE_VAAPI)
std::unique_ptr<VideoDecodeAccelerator>
Expand Down
2 changes: 2 additions & 0 deletions media/gpu/gpu_video_decode_accelerator_factory.h
Expand Up @@ -108,11 +108,13 @@ class MEDIA_GPU_EXPORT GpuVideoDecodeAcceleratorFactory {
const gpu::GpuDriverBugWorkarounds& workarounds,
const gpu::GpuPreferences& gpu_preferences,
MediaLog* media_log) const;
#if !BUILDFLAG(USE_LINUX_V4L2)
std::unique_ptr<VideoDecodeAccelerator> CreateV4L2SVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
const gpu::GpuPreferences& gpu_preferences,
MediaLog* media_log) const;
#endif
#endif
#if BUILDFLAG(USE_VAAPI)
std::unique_ptr<VideoDecodeAccelerator> CreateVaapiVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
Expand Down
6 changes: 5 additions & 1 deletion media/gpu/v4l2/generic_v4l2_device.cc
Expand Up @@ -475,9 +475,13 @@ bool GenericV4L2Device::OpenDevicePath(const std::string& path, Type type) {
return false;

#if BUILDFLAG(USE_LIBV4L2)
#if BUILDFLAG(USE_LINUX_V4L2)
if (
#else
if (type == Type::kEncoder &&
#endif
HANDLE_EINTR(v4l2_fd_open(device_fd_.get(), V4L2_DISABLE_CONVERSION)) !=
-1) {
-1) {
VLOGF(2) << "Using libv4l2 for " << path;
use_libv4l2_ = true;
}
Expand Down
22 changes: 22 additions & 0 deletions media/gpu/v4l2/v4l2_device.cc
Expand Up @@ -891,6 +891,19 @@ uint32_t V4L2Device::VideoPixelFormatToV4L2PixFmt(VideoPixelFormat format) {
}

// static
#if BUILDFLAG(USE_LINUX_V4L2)
uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
bool slice_based) {
if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
return V4L2_PIX_FMT_H264;
} else if (profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX) {
return V4L2_PIX_FMT_VP8;
} else {
LOG(FATAL) << "Add more cases as needed";
return 0;
}
}
#else
uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
bool slice_based) {
if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
Expand Down Expand Up @@ -931,6 +944,7 @@ VideoCodecProfile V4L2Device::V4L2VP9ProfileToVideoCodecProfile(
return VIDEO_CODEC_PROFILE_UNKNOWN;
}
}
#endif

// static
std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
Expand All @@ -941,7 +955,9 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(

switch (pix_fmt) {
case V4L2_PIX_FMT_H264:
#if !BUILDFLAG(USE_LINUX_V4L2)
case V4L2_PIX_FMT_H264_SLICE:
#endif
if (is_encoder) {
// TODO(posciak): need to query the device for supported H.264 profiles,
// for now choose Main as a sensible default.
Expand All @@ -954,11 +970,14 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
break;

case V4L2_PIX_FMT_VP8:
#if !BUILDFLAG(USE_LINUX_V4L2)
case V4L2_PIX_FMT_VP8_FRAME:
#endif
min_profile = VP8PROFILE_MIN;
max_profile = VP8PROFILE_MAX;
break;

#if !BUILDFLAG(USE_LINUX_V4L2)
case V4L2_PIX_FMT_VP9:
case V4L2_PIX_FMT_VP9_FRAME: {
v4l2_queryctrl query_ctrl = {};
Expand All @@ -985,6 +1004,7 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
}
break;
}
#endif

default:
VLOGF(1) << "Unhandled pixelformat " << FourccToString(pix_fmt);
Expand Down Expand Up @@ -1014,8 +1034,10 @@ uint32_t V4L2Device::V4L2PixFmtToDrmFormat(uint32_t format) {
case V4L2_PIX_FMT_RGB32:
return DRM_FORMAT_ARGB8888;

#if !BUILDFLAG(USE_LINUX_V4L2)
case V4L2_PIX_FMT_MT21:
return DRM_FORMAT_MT21;
#endif

default:
DVLOGF(1) << "Unrecognized format " << FourccToString(format);
Expand Down
6 changes: 5 additions & 1 deletion media/gpu/v4l2/v4l2_video_decode_accelerator.cc
Expand Up @@ -27,6 +27,7 @@
#include "media/base/scopedfd_helper.h"
#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_types.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/video/h264_parser.h"
#include "ui/gfx/geometry/rect.h"
Expand Down Expand Up @@ -69,7 +70,10 @@ namespace media {

// static
const uint32_t V4L2VideoDecodeAccelerator::supported_input_fourccs_[] = {
V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8,
#if !BUILDFLAG(USE_LINUX_V4L2)
V4L2_PIX_FMT_VP9,
#endif
};

struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
Expand Down

0 comments on commit 58cca76

Please sign in to comment.