// mythv4l2m2mcontext.cpp - MythTV V4L2 Memory2Memory codec support
// Qt
#include <QDir>
// MythTV
#include "mythlogging.h"
#include "v4l2util.h"
#include "fourcc.h"
#include "avformatdecoder.h"
#include "mythplayerui.h"
#include "opengl/mythrenderopengl.h"
#ifdef USING_EGL
#include "opengl/mythdrmprimeinterop.h"
#endif
#include "mythv4l2m2mcontext.h"
#ifdef USING_MMAL
#include "mythmmalcontext.h"
#endif
// Sys
#include <sys/ioctl.h>
// FFmpeg
extern "C" {
#include "libavutil/opt.h"
}
#define LOC QString("V4L2_M2M: ")
static bool s_useV4L2Request = !qEnvironmentVariableIsEmpty("MYTHTV_V4L2_REQUEST");
/*! \class MythV4L2M2MContext
* \brief A handler for V4L2 Memory2Memory codecs.
*
* The bulk of the 'direct rendering' support is in MythDRMPRIMEContext. This
* sub-class handles v4l2 specific functionality checks and support for software
* frame formats.
*/
// Trivial constructor - all shared DRM PRIME state is managed by the
// MythDRMPRIMEContext base class; this subclass only adds V4L2 specifics.
MythV4L2M2MContext::MythV4L2M2MContext(DecoderBase *Parent, MythCodecID CodecID)
  : MythDRMPRIMEContext(Parent, CodecID)
{
}
/// \brief Report whether a flush will reset this decoder.
/// Returns true for v4l2 codec IDs (presumably because the underlying M2M
/// device loses state on flush - behaviour defined by codec_is_v4l2, declared
/// elsewhere; confirm against its definition).
bool MythV4L2M2MContext::DecoderWillResetOnFlush(void)
{
    return codec_is_v4l2(m_codecID);
}
/*! \brief Determine whether this codec/stream can be handled by a V4L2 M2M decoder.
 *
 * \param Context    Codec context (pix_fmt may be updated for the request API path)
 * \param Codec      Codec whose id is mapped to a MythCodecContext profile
 * \param Decoder    Decoder name; must start with "v4l2" ("v4l2-dec" selects
 *                   decode-only, i.e. software frames)
 * \param Stream     Unused here (forwarded to GetPrimeCodec)
 * \param StreamType Offset used to build the success/failure codec ids
 * \return The V4L2 codec id on success, the plain software id on failure.
 */
MythCodecID MythV4L2M2MContext::GetSupportedCodec(AVCodecContext **Context,
                                                  AVCodec **Codec,
                                                  const QString &Decoder,
                                                  AVStream *Stream,
                                                  uint StreamType)
{
    bool decodeonly = Decoder == "v4l2-dec";
    auto success = static_cast<MythCodecID>((decodeonly ? kCodec_MPEG1_V4L2_DEC : kCodec_MPEG1_V4L2) + (StreamType - 1));
    auto failure = static_cast<MythCodecID>(kCodec_MPEG1 + (StreamType - 1));

    // not us
    if (!Decoder.startsWith("v4l2"))
        return failure;

    // direct rendering requires DRM PRIME frame support
    if (!decodeonly)
        if (!FrameTypeIsSupported(*Context, FMT_DRMPRIME))
            return failure;

    // supported by device driver?
    MythCodecContext::CodecProfile mythprofile = MythCodecContext::NoProfile;
    switch ((*Codec)->id)
    {
        case AV_CODEC_ID_MPEG1VIDEO: mythprofile = MythCodecContext::MPEG1; break;
        case AV_CODEC_ID_MPEG2VIDEO: mythprofile = MythCodecContext::MPEG2; break;
        case AV_CODEC_ID_MPEG4:      mythprofile = MythCodecContext::MPEG4; break;
        case AV_CODEC_ID_H263:       mythprofile = MythCodecContext::H263;  break;
        case AV_CODEC_ID_H264:       mythprofile = MythCodecContext::H264;  break;
        case AV_CODEC_ID_VC1:        mythprofile = MythCodecContext::VC1;   break;
        case AV_CODEC_ID_VP8:        mythprofile = MythCodecContext::VP8;   break;
        case AV_CODEC_ID_VP9:        mythprofile = MythCodecContext::VP9;   break;
        case AV_CODEC_ID_HEVC:       mythprofile = MythCodecContext::HEVC;  break;
        default: break;
    }

    if (mythprofile == MythCodecContext::NoProfile)
        return failure;

    const V4L2Profiles& profiles = MythV4L2M2MContext::GetProfiles();
    if (!profiles.contains(mythprofile))
        return failure;

#ifdef USING_MMAL
    // If MMAL is available, assume this is a Raspberry Pi and check the supported
    // video sizes
    if (!MythMMALContext::CheckCodecSize((*Context)->width, (*Context)->height, mythprofile))
        return failure;
    // As for MMAL, don't try and decode 10bit H264
    if ((*Codec)->id == AV_CODEC_ID_H264)
    {
        if ((*Context)->profile == FF_PROFILE_H264_HIGH_10 ||
            (*Context)->profile == FF_PROFILE_H264_HIGH_10_INTRA)
        {
            return failure;
        }
    }
#endif

    if (s_useV4L2Request && !decodeonly)
    {
        // Fixed log typo: was "v42l_request"
        LOG(VB_GENERAL, LOG_INFO, LOC + QString("Forcing support for %1 v4l2_request")
            .arg(ff_codec_id_string((*Context)->codec_id)));
        (*Context)->pix_fmt = AV_PIX_FMT_DRM_PRIME;
        return success;
    }

    return MythDRMPRIMEContext::GetPrimeCodec(Context, Codec, Stream,
                                              success, failure, "v4l2m2m",
                                              decodeonly ? (*Context)->pix_fmt : AV_PIX_FMT_DRM_PRIME);
}
/// \brief Initialise hardware decoding state.
/// The request-API and software ('v4l2-dec') paths need no extra device setup
/// here; everything else is delegated to MythDRMPRIMEContext.
int MythV4L2M2MContext::HwDecoderInit(AVCodecContext *Context)
{
    if (Context == nullptr)
        return -1;

    const bool requestapi = s_useV4L2Request && codec_is_v4l2(m_codecID);
    if (requestapi || codec_is_v4l2_dec(m_codecID))
        return 0;

    return MythDRMPRIMEContext::HwDecoderInit(Context);
}
/// \brief Configure the codec context for the selected V4L2 path.
/// Request API: install our get_format callback. Software decode: disable
/// direct rendering. Otherwise defer to the DRM PRIME base implementation.
void MythV4L2M2MContext::InitVideoCodec(AVCodecContext *Context, bool SelectedStream, bool &DirectRendering)
{
    if (s_useV4L2Request && codec_is_v4l2(m_codecID))
    {
        Context->get_format = MythV4L2M2MContext::GetV4L2RequestFormat;
    }
    else if (codec_is_v4l2_dec(m_codecID))
    {
        DirectRendering = false;
    }
    else
    {
        MythDRMPRIMEContext::InitVideoCodec(Context, SelectedStream, DirectRendering);
    }
}
/// \brief Retrieve a decoded frame via the appropriate path.
/// Request API frames use the generic buffer fetch, software frames are copied
/// by our GetBuffer, and DRM PRIME frames go through the base class.
bool MythV4L2M2MContext::RetrieveFrame(AVCodecContext *Context, MythVideoFrame *Frame, AVFrame *AvFrame)
{
    const bool requestapi = s_useV4L2Request && codec_is_v4l2(m_codecID);
    if (requestapi)
        return MythCodecContext::GetBuffer2(Context, Frame, AvFrame, 0);

    if (codec_is_v4l2_dec(m_codecID))
        return GetBuffer(Context, Frame, AvFrame, 0);

    return MythDRMPRIMEContext::RetrieveFrame(Context, Frame, AvFrame);
}
/*! \brief Reduce the number of capture buffers
 *
 * Testing on Pi 3, the default of 20 is too high and leads to memory allocation
 * failures in the kernel driver.
 */
// Lower FFmpeg's v4l2m2m 'num_capture_buffers' from its default to 6.
// Not applicable to the V4L2 request API path.
void MythV4L2M2MContext::SetDecoderOptions(AVCodecContext* Context, AVCodec* Codec)
{
    if (s_useV4L2Request && codec_is_v4l2(m_codecID))
        return;

    // Need a context with private data and a codec exposing private options
    // (short-circuit keeps the null checks ahead of the dereferences)
    const bool usable = Context && Codec && Codec->priv_class && Context->priv_data;
    if (!usable)
        return;

    // best guess currently - this matches the number of capture buffers to the
    // number of output buffers - and hence to the number of video buffers for
    // direct rendering
    LOG(VB_PLAYBACK, LOG_INFO, LOC + "Setting number of capture buffers to 6");
    av_opt_set_int(Context->priv_data, "num_capture_buffers", 6, 0);
}
/*! \brief Retrieve a frame from CPU memory
*
* This is similar to the default, direct render supporting, get_av_buffer in
* AvFormatDecoder but we copy the data from the AVFrame rather than providing
* our own buffer (the codec does not support direct rendering).
*/
/*! \brief Copy a software (CPU memory) frame out of the AVFrame.
 *
 * \param Context Codec context; opaque must point at the owning AvFormatDecoder
 * \param Frame   Destination video frame (re-initialised if type/size differ)
 * \param AvFrame Source frame from the decoder
 * \return true when the frame format is renderable and the copy succeeded.
 */
bool MythV4L2M2MContext::GetBuffer(AVCodecContext *Context, MythVideoFrame *Frame, AVFrame *AvFrame, int /*Flags*/)
{
    // Sanity checks
    if (!Context || !AvFrame || !Frame)
        return false;

    // Ensure we can render this format
    auto *decoder = static_cast<AvFormatDecoder*>(Context->opaque);
    VideoFrameType type = MythAVUtil::PixelFormatToFrameType(static_cast<AVPixelFormat>(AvFrame->format));
    const VideoFrameTypes* supported = Frame->m_renderFormats;
    auto foundIt = std::find(supported->cbegin(), supported->cend(), type);
    // No fallback currently (unlikely)
    // (consistency fix: compare with cend() to match the cbegin()/cend() search range)
    if (foundIt == supported->cend())
        return false;

    // Re-allocate if necessary
    if ((Frame->m_type != type) || (Frame->m_width != AvFrame->width) || (Frame->m_height != AvFrame->height))
        if (!VideoBuffers::ReinitBuffer(Frame, type, decoder->GetVideoCodecID(), AvFrame->width, AvFrame->height))
            return false;

    // Copy each plane, respecting the destination pitch/offset layout
    uint count = MythVideoFrame::GetNumPlanes(Frame->m_type);
    for (uint plane = 0; plane < count; ++plane)
    {
        MythVideoFrame::CopyPlane(Frame->m_buffer + Frame->m_offsets[plane], Frame->m_pitches[plane],
                                  AvFrame->data[plane], AvFrame->linesize[plane],
                                  MythVideoFrame::GetPitchForPlane(Frame->m_type, AvFrame->width, plane),
                                  MythVideoFrame::GetHeightForPlane(Frame->m_type, AvFrame->height, plane));
    }
    return true;
}
#ifndef V4L2_PIX_FMT_HEVC
#define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C')
#endif
#ifndef V4L2_PIX_FMT_VP9
#define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0')
#endif
/*! \brief Enumerate the codec profiles supported by available V4L2 M2M devices.
 *
 * Scans /dev/video* once (result cached behind a mutex). A device counts when
 * it advertises capture+output (single or multi planar, or the M2M variants)
 * plus streaming, lists the codec's pixel format on its output queue, and
 * lists a supported YUV420/NV12/NV21 variant on its capture queue.
 * With MYTHTV_V4L2_REQUEST set, all known profiles are assumed available.
 */
const V4L2Profiles& MythV4L2M2MContext::GetProfiles(void)
{
    using V4L2Mapping = QPair<const uint32_t, const MythCodecContext::CodecProfile>;
    static const std::array<const V4L2Mapping,9> s_map
    {{
        { V4L2_PIX_FMT_MPEG1,       MythCodecContext::MPEG1 },
        { V4L2_PIX_FMT_MPEG2,       MythCodecContext::MPEG2 },
        { V4L2_PIX_FMT_MPEG4,       MythCodecContext::MPEG4 },
        { V4L2_PIX_FMT_H263,        MythCodecContext::H263  },
        { V4L2_PIX_FMT_H264,        MythCodecContext::H264  },
        { V4L2_PIX_FMT_VC1_ANNEX_G, MythCodecContext::VC1   },
        { V4L2_PIX_FMT_VP8,         MythCodecContext::VP8   },
        { V4L2_PIX_FMT_VP9,         MythCodecContext::VP9   },
        { V4L2_PIX_FMT_HEVC,        MythCodecContext::HEVC  }
    }};

    static QMutex lock(QMutex::Recursive);
    static bool s_initialised = false;
    static V4L2Profiles s_profiles;

    QMutexLocker locker(&lock);
    if (s_initialised)
        return s_profiles;
    s_initialised = true;

    if (s_useV4L2Request)
    {
        // Fixed log typo: was "endabled"
        LOG(VB_GENERAL, LOG_INFO, LOC + "V4L2Request support enabled - assuming all available");
        for (const auto& profile : s_map)
            s_profiles.append(profile.second);
        return s_profiles;
    }

    const QString root("/dev/");
    QDir dir(root);
    QStringList namefilters;
    namefilters.append("video*");
    QStringList devices = dir.entryList(namefilters, QDir::Files | QDir::System);
    for (const QString& device : qAsConst(devices))
    {
        V4L2util v4l2dev(root + device);
        uint32_t caps = v4l2dev.GetCapabilities();
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Device: %1 Driver: '%2' Capabilities: 0x%3")
            .arg(v4l2dev.GetDeviceName()).arg(v4l2dev.GetDriverName()).arg(caps, 0, 16));

        // check capture and output support
        // these mimic the device checks in v4l2_m2m.c
        bool mplanar = ((caps & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE)) != 0U) &&
                       ((caps & V4L2_CAP_STREAMING) != 0U);
        bool mplanarm2m = (caps & V4L2_CAP_VIDEO_M2M_MPLANE) != 0U;
        bool splanar = ((caps & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT)) != 0U) &&
                       ((caps & V4L2_CAP_STREAMING) != 0U);
        bool splanarm2m = (caps & V4L2_CAP_VIDEO_M2M) != 0U;

        if (!(mplanar || mplanarm2m || splanar || splanarm2m))
            continue;

        v4l2_buf_type capturetype = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        v4l2_buf_type outputtype = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        if (mplanar || mplanarm2m)
        {
            capturetype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            outputtype = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        }

        // check codec support
        QStringList debug;
        QSize dummy{0, 0};
        for (const auto& profile : s_map)
        {
            bool found = false;
            uint32_t v4l2pixfmt = profile.first;
            MythCodecContext::CodecProfile mythprofile = profile.second;
            // value-initialised - the redundant memset that followed has been removed
            struct v4l2_fmtdesc fdesc {};
            // check output first
            fdesc.type = outputtype;
            while (!found)
            {
                int res = ioctl(v4l2dev.FD(), VIDIOC_ENUM_FMT, &fdesc);
                if (res)
                    break;
                if (fdesc.pixelformat == v4l2pixfmt)
                    found = true;
                fdesc.index++;
            }
            if (found)
            {
                QStringList pixformats;
                bool foundfmt = false;
                // check capture (reset the descriptor for the new enumeration)
                memset(&fdesc, 0, sizeof(fdesc));
                fdesc.type = capturetype;
                while (true)
                {
                    int res = ioctl(v4l2dev.FD(), VIDIOC_ENUM_FMT, &fdesc);
                    if (res)
                        break;
                    pixformats.append(fourcc_str(static_cast<int>(fdesc.pixelformat)));
                    // this is a bit of a shortcut
                    if (fdesc.pixelformat == V4L2_PIX_FMT_YUV420  ||
                        fdesc.pixelformat == V4L2_PIX_FMT_YVU420  ||
                        fdesc.pixelformat == V4L2_PIX_FMT_YUV420M ||
                        fdesc.pixelformat == V4L2_PIX_FMT_YVU420M ||
                        fdesc.pixelformat == V4L2_PIX_FMT_NV12    ||
                        fdesc.pixelformat == V4L2_PIX_FMT_NV12M   ||
                        fdesc.pixelformat == V4L2_PIX_FMT_NV21    ||
                        fdesc.pixelformat == V4L2_PIX_FMT_NV21M)
                    {
                        if (!s_profiles.contains(mythprofile))
                            s_profiles.append(mythprofile);
                        foundfmt = true;
                        break;
                    }
                    fdesc.index++;
                }
                if (!foundfmt)
                {
                    if (pixformats.isEmpty())
                        pixformats.append("None");
                    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Codec '%1' has no supported formats (Supported: %2)")
                        .arg(MythCodecContext::GetProfileDescription(mythprofile, dummy)).arg(pixformats.join((","))));
                }
            }
        }
    }

    return s_profiles;
}
/// \brief Append a description of each available V4L2 profile to Decoders.
/// Appends nothing (not even the header line) when no profiles are available.
void MythV4L2M2MContext::GetDecoderList(QStringList &Decoders)
{
    const V4L2Profiles& available = MythV4L2M2MContext::GetProfiles();
    if (available.isEmpty())
        return;

    Decoders.append("V4L2:");
    QSize dummy(0, 0);
    for (auto profile : available)
        Decoders.append(MythCodecContext::GetProfileDescription(profile, dummy));
}
bool MythV4L2M2MContext::HaveV4L2Codecs(void)
{
static QMutex lock(QMutex::Recursive);
QMutexLocker locker(&lock);
static bool s_checked = false;
static bool s_available = false;
if (s_checked)
return s_available;
s_checked = true;
const V4L2Profiles& profiles = MythV4L2M2MContext::GetProfiles();
if (profiles.isEmpty())
{
LOG(VB_GENERAL, LOG_INFO, LOC + "No V4L2 decoders found");
return s_available;
}
LOG(VB_GENERAL, LOG_INFO, LOC + "Supported/available V4L2 decoders:");
s_available = true;
QSize size{0, 0};
for (auto profile : qAsConst(profiles))
LOG(VB_GENERAL, LOG_INFO, LOC + MythCodecContext::GetProfileDescription(profile, size));
return s_available;
}
/// \brief get_format callback for the V4L2 request API path.
/// Walks the offered pixel formats and accepts DRM PRIME once a request
/// context has been successfully created; otherwise keeps scanning.
AVPixelFormat MythV4L2M2MContext::GetV4L2RequestFormat(AVCodecContext *Context, const AVPixelFormat *PixFmt)
{
    for (const AVPixelFormat* fmt = PixFmt; *fmt != AV_PIX_FMT_NONE; ++fmt)
    {
        if (*fmt != AV_PIX_FMT_DRM_PRIME)
            continue;
        if (MythCodecContext::InitialiseDecoder(Context, MythV4L2M2MContext::InitialiseV4L2RequestContext,
                                                "V4L2 request context creation") >= 0)
            return AV_PIX_FMT_DRM_PRIME;
    }
    return AV_PIX_FMT_NONE;
}
/// \brief Create the DRM hardware device context for the V4L2 request API.
/// Must run on the UI thread (needs the OpenGL render context). On success the
/// device context is attached to Context->hw_device_ctx and 0 is returned;
/// every failure path releases whatever was acquired and returns -1.
int MythV4L2M2MContext::InitialiseV4L2RequestContext(AVCodecContext *Context)
{
    if (!Context || !gCoreContext->IsUIThread())
        return -1;

    // The interop must have a reference to the ui player so it can be deleted
    // from the main thread.
    MythPlayerUI* player = GetPlayerUI(Context);
    if (!player)
        return -1;

    // Retrieve OpenGL render context
    auto * render = dynamic_cast<MythRenderOpenGL*>(player->GetRender());
    if (!render)
        return -1;
    OpenGLLocker locker(render);

    // Create interop (EGL builds only; without EGL this always fails)
    MythOpenGLInterop *interop = nullptr;
#ifdef USING_EGL
    interop = MythDRMPRIMEInterop::CreateDRM(render);
#endif
    if (!interop)
        return -1;

    // Set the player required to process interop release
    interop->SetPlayer(player);

    // Allocate the device context (takes the interop reference on success)
    AVBufferRef* hwdeviceref = MythCodecContext::CreateDevice(AV_HWDEVICE_TYPE_DRM, interop);
    if (!hwdeviceref)
    {
        // undo our reference - the device was never created
        interop->DecrRef();
        return -1;
    }

    auto* hwdevicecontext = reinterpret_cast<AVHWDeviceContext*>(hwdeviceref->data);
    if (!hwdevicecontext || !hwdevicecontext->hwctx)
    {
        interop->DecrRef();
        return -1;
    }

    // Initialise device context
    if (av_hwdevice_ctx_init(hwdeviceref) < 0)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to initialise device context");
        av_buffer_unref(&hwdeviceref);
        interop->DecrRef();
        return -1;
    }

    // Success: hand ownership of the device ref to the codec context
    Context->hw_device_ctx = hwdeviceref;
    return 0;
}