350 changes: 350 additions & 0 deletions mythtv/libs/libmythtv/mythdeinterlacer.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,350 @@
// MythTV
#include "mythlogging.h"
#include "mythavutil.h"
#include "mythdeinterlacer.h"

extern "C" {
#include "libavfilter/buffersrc.h"
#include "libavfilter/buffersink.h"
}

#define LOC QString("MythDeint: ")

/*! \class MythDeinterlacer
* \brief Handles software based deinterlacing of video frames.
*
* Based upon the deinterlacing preferences passed in through the video
* frame, MythDeinterlacer will deinterlace the given frame using the appropriate
* quality and using single or double frame rate.
*
* The following deinterlacers are used:
* Basic - a simple onefield/bob (N.B. Subject to change!)
* Medium - libavfilter's yadif
* High - libavfilter's bwdif
*
* \note libavfilter frame doubling filters expect frames to be presented
* in the correct order and will break if they do not receive a frame followed
* by the retrieval of 2 'fields'.
* \note There is no support for deinterlacing NV12 frame formats
*/
MythDeinterlacer::~MythDeinterlacer()
{
// Release the libavfilter graph and any cached 'bob' frame
Cleanup();
}

/*! \brief Deinterlace Frame if needed
*
* Software deinterlacing is the choice of last resort i.e. shader or driver
* based deinterlacers are always preferred if available.
*
* If using double rate deinterlacing, the frame must be presented in the correct
* order i.e. kScan_Interlaced followed by kScan_Intr2ndField.
*
* The appropriate field to deinterlace is determined by the scan type and the flags
* for interlaced_reverse and top_field_first in VideoFrame.
*
* libavfilter filters must be recreated if any parameter is changed as there is no method
* to select the field 'on the fly'.
*/
void MythDeinterlacer::Filter(VideoFrame *Frame, FrameScanType Scan)
{
// nothing to see here
if (!Frame || (Scan != kScan_Interlaced && Scan != kScan_Intr2ndField))
{
Cleanup();
return;
}

// Sanity check frame format - no NV12 support in libavfilter
if (!format_is_yuv(Frame->codec) || format_is_nv12(Frame->codec))
{
Cleanup();
return;
}

// check for software deinterlacing
bool doublerate = true;
// honour any user scan override by flipping the detected field order
bool topfieldfirst = Frame->interlaced_reversed ? !Frame->top_field_first : Frame->top_field_first;

// Prefer double rate CPU deinterlacing - but do nothing if a double rate
// shader deinterlacer is also enabled for this frame
MythDeintType deinterlacer = GetDoubleRateOption(Frame, DEINT_CPU);
MythDeintType other = GetDoubleRateOption(Frame, DEINT_SHADER);
if (other)
{
Cleanup();
return;
}

// Fall back to single rate CPU deinterlacing (again deferring to a shader)
if (!deinterlacer)
{
doublerate = false;
deinterlacer = GetSingleRateOption(Frame, DEINT_CPU);
other = GetSingleRateOption(Frame, DEINT_SHADER);
if (!deinterlacer || other)
{
Cleanup();
return;
}
}

// Check for a change in input or deinterlacer
if (Frame->width != m_width || Frame->height != m_height ||
Frame->codec != m_inputType || Frame->pix_fmt != m_inputFmt ||
deinterlacer != m_deintType || doublerate != m_doubleRate ||
topfieldfirst != m_topFirst)
{
LOG(VB_GENERAL, LOG_INFO, LOC +
QString("Deinterlacer change: %1x%2 %3 dr:%4 tff:%5 -> %6x%7 %8 dr:%9 tff:%10")
.arg(m_width).arg(m_height).arg(format_description(m_inputType))
.arg(m_doubleRate).arg(m_topFirst)
.arg(Frame->width).arg(Frame->height).arg(format_description(Frame->codec))
.arg(doublerate).arg(topfieldfirst));
if (!Initialise(Frame, deinterlacer, doublerate, topfieldfirst))
{
Cleanup();
return;
}
}

// onefield or bob
if (m_deintType == DEINT_BASIC)
{
if (m_doubleRate)
{
// create a VideoFrame to cache the second field
if (!m_bobFrame)
{
m_bobFrame = new VideoFrame;
if (!m_bobFrame)
return;
memset(m_bobFrame, 0, sizeof(VideoFrame));
LOG(VB_PLAYBACK, LOG_INFO, "Created new 'bob' cache frame");
}

// copy Frame metadata, preserving any existing buffer allocation
unsigned char *buf = m_bobFrame->buf;
int size = m_bobFrame->size;
memcpy(m_bobFrame, Frame, sizeof(VideoFrame_));
m_bobFrame->priv[0] = m_bobFrame->priv[1] = m_bobFrame->priv[2] = m_bobFrame->priv[3] = nullptr;
m_bobFrame->buf = buf;
m_bobFrame->size = size;

// (re)allocate the cache buffer if missing or the frame size changed
if (!m_bobFrame->buf || (m_bobFrame->size != Frame->size))
{
av_free(m_bobFrame->buf);
m_bobFrame->buf = static_cast<unsigned char*>(av_malloc(static_cast<size_t>(Frame->size + 64)));
m_bobFrame->size = Frame->size;
}

if (!m_bobFrame->buf)
return;

if (kScan_Interlaced == Scan)
{
// cache the other field
OneField(Frame, m_bobFrame, !m_topFirst);
// double the current
OneField(Frame, Frame, m_topFirst);
}
else
{
// retrieve the cached field
OneField(m_bobFrame, Frame, m_topFirst);
// and double it
OneField(Frame, Frame, !m_topFirst);
}
}
else
{
// single rate: line double whichever field this pass represents
OneField(Frame, Frame, Scan == kScan_Interlaced ? m_topFirst : !m_topFirst);
}
return;
}

// We need a filter
if (!m_graph)
return;

// Convert VideoFrame to AVFrame - no copy
if (AVPictureFill(m_frame, Frame, m_inputFmt) < 1)
{
LOG(VB_GENERAL, LOG_ERR, LOC + "Error converting frame");
return;
}

m_frame->width = Frame->width;
m_frame->height = Frame->height;
m_frame->format = Frame->pix_fmt;

// Add frame on first pass only
if (kScan_Interlaced == Scan)
{
if (av_buffersrc_add_frame(m_source, m_frame) < 0)
{
LOG(VB_GENERAL, LOG_ERR, LOC + "Error adding frame");
return;
}
}

// Retrieve frame
int res = av_buffersink_get_frame(m_sink, m_frame);
if (res < 0)
{
// EAGAIN just means the filter needs more input before it can output
if (res == AVERROR(EAGAIN))
return;
LOG(VB_GENERAL, LOG_ERR, LOC + "Error retrieving frame");
return;
}

// Ensure AVFrame is in the expected format
if ((m_frame->format != m_inputFmt) || (Frame->pitches[0] < m_frame->linesize[0]) ||
(Frame->pitches[1] < m_frame->linesize[1]) || (Frame->pitches[2] < m_frame->linesize[2]))
{
LOG(VB_GENERAL, LOG_ERR, LOC + "Filter returned unexpected format");
return;
}

// Copy AVFrame back to VideoFrame
uint count = planes(Frame->codec);
for (uint plane = 0; plane < count; ++plane)
copyplane(Frame->buf + Frame->offsets[plane], Frame->pitches[plane], m_frame->data[plane], m_frame->linesize[plane],
pitch_for_plane(m_inputType, m_frame->width, plane), height_for_plane(m_inputType, m_frame->height, plane));

// Free frame data
av_frame_unref(m_frame);
}

/// \brief Release all deinterlacing state and reset to DEINT_NONE.
void MythDeinterlacer::Cleanup(void)
{
    // Drop the cached field used by double rate bob (buffer was av_malloc'd)
    if (m_bobFrame != nullptr)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + "Removing 'bob' cache frame");
        av_free(m_bobFrame->buf);
        delete m_bobFrame;
        m_bobFrame = nullptr;
    }

    // Drop the libavfilter graph (yadif/bwdif)
    if (m_graph != nullptr)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + "Removing CPU deinterlacer");
        avfilter_graph_free(&m_graph);
    }

    m_deintType = DEINT_NONE;
}

///\brief Initialise deinterlacing using the given MythDeintType
/*! \brief Initialise deinterlacing using the given MythDeintType.
 *
 * \param Frame         Reference frame used to pick up size and pixel format.
 * \param Deinterlacer  Quality selection (DEINT_BASIC/MEDIUM/HIGH).
 * \param DoubleRate    Enable frame rate doubling.
 * \param TopFieldFirst Field order for the filter 'parity' option.
 * \return true on success. On failure all partially created state is
 *         released here - the caller does not need to invoke Cleanup().
 */
bool MythDeinterlacer::Initialise(VideoFrame *Frame, MythDeintType Deinterlacer,
                                  bool DoubleRate, bool TopFieldFirst)
{
    // Remove any existing state first
    Cleanup();
    m_source = nullptr;
    m_sink   = nullptr;

    if (!Frame)
        return false;

    m_width     = Frame->width;
    m_height    = Frame->height;
    m_inputType = Frame->codec;
    m_inputFmt  = static_cast<AVPixelFormat>(Frame->pix_fmt);
    QString name = DeinterlacerName(Deinterlacer | DEINT_CPU, DoubleRate);

    // simple onefield/bob? Handled internally - no libavfilter graph needed
    if (Deinterlacer == DEINT_BASIC)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Using deinterlacer '%1'").arg(name));
        m_deintType  = Deinterlacer;
        m_doubleRate = DoubleRate;
        m_topFirst   = TopFieldFirst;
        return true;
    }

    // Sanity check the frame formats
    if (PixelFormatToFrameType(m_inputFmt) != m_inputType)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Inconsistent frame formats");
        return false;
    }

    m_graph = avfilter_graph_alloc();
    if (!m_graph)
        return false;

    AVFilterInOut* inputs  = nullptr;
    AVFilterInOut* outputs = nullptr;

    // mode=1 doubles the frame rate; parity selects the field order
    QString deint;
    switch (Deinterlacer)
    {
        case DEINT_MEDIUM:
            deint = QString("yadif=mode=%1:parity=%2").arg(DoubleRate ? 1 : 0).arg(TopFieldFirst ? 0 : 1);
            break;
        case DEINT_HIGH:
            deint = QString("bwdif=mode=%1:parity=%2").arg(DoubleRate ? 1 : 0).arg(TopFieldFirst ? 0 : 1);
            break;
        default: break;
    }

    if (deint.isEmpty())
    {
        // Don't hold on to an unused graph - release it immediately rather
        // than relying on the caller to invoke Cleanup()
        avfilter_graph_free(&m_graph);
        return false;
    }

    QString graph = QString("buffer=video_size=%1x%2:pix_fmt=%3:time_base=1/1[in];[in]%4[out];[out] buffersink")
        .arg(m_width).arg(m_height).arg(m_inputFmt).arg(deint);

    int res = avfilter_graph_parse2(m_graph, graph.toLatin1().constData(), &inputs, &outputs);
    // A fully connected graph leaves no dangling inputs/outputs
    if (res >= 0 && !inputs && !outputs)
    {
        res = avfilter_graph_config(m_graph, nullptr);
        if (res >= 0)
        {
            // avfilter_graph_parse2 names parsed filters 'Parsed_<name>_<index>'
            m_source = avfilter_graph_get_filter(m_graph, "Parsed_buffer_0");
            m_sink   = avfilter_graph_get_filter(m_graph, "Parsed_buffersink_2");

            if (m_source && m_sink)
            {
                LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created deinterlacer '%1'").arg(name));
                m_deintType  = Deinterlacer;
                m_doubleRate = DoubleRate;
                m_topFirst   = TopFieldFirst;
                return true;
            }
        }
    }

    LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create avfilter");
    m_deintType = DEINT_NONE;
    avfilter_inout_free(&inputs);
    avfilter_inout_free(&outputs);
    // Free the failed graph here so this object never holds a half built
    // graph (and m_source/m_sink never dangle into freed memory)
    avfilter_graph_free(&m_graph);
    m_source = nullptr;
    m_sink   = nullptr;
    return false;
}

///\brief Copy a field from Source to Dest
/*! \brief Line double one field of Source into Dest.
 *
 * Copies every second line of Source (the field selected by Top) into the
 * opposite field lines of Dest. Frames must match in format and geometry.
 * Vertically subsampled chroma planes are left untouched.
 */
void MythDeinterlacer::OneField(VideoFrame* Source, VideoFrame* Dest, bool Top)
{
    if (!Source || !Dest)
        return;

    bool matched = (Source->codec  == Dest->codec) &&
                   (Source->width  == Dest->width) &&
                   (Source->height == Dest->height);
    if (!matched)
        return;

    uint planecount = planes(Source->codec);
    for (uint plane = 0; plane < planecount; ++plane)
    {
        int planeheight = height_for_plane(Source->codec, Source->height, plane);
        // it's pointless trying to deinterlace vertically subsampled chroma
        if (planeheight < Source->height)
            break;

        int copywidth = pitch_for_plane(Source->codec, Source->width, plane);
        unsigned char *src = Source->buf + Source->offsets[plane];
        unsigned char *dst = Dest->buf + Dest->offsets[plane];
        // read the selected field, write the other field's lines
        if (Top)
            dst += Dest->pitches[plane];
        else
            src += Source->pitches[plane];

        int srcstride = Source->pitches[plane] * 2;
        int dststride = Dest->pitches[plane] * 2;
        for (int line = 0; line < planeheight; line += 2)
        {
            memcpy(dst, src, static_cast<size_t>(copywidth));
            src += srcstride;
            dst += dststride;
        }
    }
}
44 changes: 44 additions & 0 deletions mythtv/libs/libmythtv/mythdeinterlacer.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
#ifndef MYTHDEINTERLACER_H
#define MYTHDEINTERLACER_H

// Qt
#include <QSize>

// MythTV
#include "videoouttypes.h"
#include "mythavutil.h"

extern "C" {
#include "libavfilter/avfilter.h"
}

/// Software (CPU) deinterlacing of video frames using internal onefield/bob
/// or libavfilter's yadif/bwdif filters.
class MythDeinterlacer
{
public:
MythDeinterlacer() = default;
~MythDeinterlacer();

// Deinterlace Frame in place if CPU deinterlacing is requested for it
void Filter (VideoFrame *Frame, FrameScanType Scan);

private:
// (Re)create state for the given deinterlacer/rate/field order
bool Initialise (VideoFrame *Frame, MythDeintType Deinterlacer,
bool DoubleRate, bool TopFieldFirst);
// Release filter graph and cached frame; resets to DEINT_NONE
inline void Cleanup (void);
// Line double one field of Source into Dest
static void OneField (VideoFrame *Source, VideoFrame *Dest, bool Top);

private:
VideoFrameType m_inputType { FMT_NONE }; // current input frame type
AVPixelFormat m_inputFmt { AV_PIX_FMT_NONE }; // matching FFmpeg pixel format
int m_width { 0 }; // current frame width
int m_height { 0 }; // current frame height
MythDeintType m_deintType { DEINT_NONE }; // active deinterlacer quality
bool m_doubleRate { false }; // true when rate doubling
bool m_topFirst { true }; // current field order
MythAVFrame m_frame { }; // scratch AVFrame wrapper
AVFilterGraph* m_graph { nullptr }; // libavfilter graph (yadif/bwdif)
AVFilterContext* m_source { nullptr }; // graph buffer source
AVFilterContext* m_sink { nullptr }; // graph buffer sink
VideoFrame* m_bobFrame { nullptr }; // cached field for double rate bob
};

#endif // MYTHDEINTERLACER_H
34 changes: 34 additions & 0 deletions mythtv/libs/libmythtv/mythframe.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,12 @@ void framecopy(VideoFrame* dst, const VideoFrame* src, bool useSSE)
dst->interlaced_frame = src->interlaced_frame;
dst->repeat_pict = src->repeat_pict;
dst->top_field_first = src->top_field_first;
dst->interlaced_reversed = src->interlaced_reversed;
dst->colorspace = src->colorspace;
dst->colorrange = src->colorrange;
dst->colorprimaries = src->colorprimaries;
dst->colortransfer = src->colortransfer;
dst->chromalocation = src->chromalocation;

if (FMT_YV12 == codec)
{
Expand Down Expand Up @@ -616,7 +621,12 @@ void MythUSWCCopy::copy(VideoFrame *dst, const VideoFrame *src)
dst->interlaced_frame = src->interlaced_frame;
dst->repeat_pict = src->repeat_pict;
dst->top_field_first = src->top_field_first;
dst->interlaced_reversed = src->interlaced_reversed;
dst->colorspace = src->colorspace;
dst->colorrange = src->colorrange;
dst->colorprimaries = src->colorprimaries;
dst->colortransfer = src->colortransfer;
dst->chromalocation = src->chromalocation;

int width = src->width;
int height = src->height;
Expand Down Expand Up @@ -806,3 +816,27 @@ int ColorDepth(int Format)
return 8;
}

/// Return the single rate deinterlacer quality enabled for Frame if it is
/// permitted for the given Type (CPU/shader/driver), DEINT_NONE otherwise.
MythDeintType GetSingleRateOption(const VideoFrame* Frame, MythDeintType Type)
{
    if (Frame)
    {
        MythDeintType enabled = Frame->deinterlace_single & Frame->deinterlace_allowed;
        if (enabled & Type)
            return GetDeinterlacer(enabled);
    }
    return DEINT_NONE;
}

/// Return the double rate deinterlacer quality enabled for Frame if it is
/// permitted for the given Type (CPU/shader/driver), DEINT_NONE otherwise.
MythDeintType GetDoubleRateOption(const VideoFrame* Frame, MythDeintType Type)
{
    if (Frame)
    {
        MythDeintType enabled = Frame->deinterlace_double & Frame->deinterlace_allowed;
        if (enabled & Type)
            return GetDeinterlacer(enabled);
    }
    return DEINT_NONE;
}

/// Strip the CPU/shader/driver bits, leaving only the quality selection.
MythDeintType GetDeinterlacer(MythDeintType Option)
{
    MythDeintType quality = DEINT_BASIC | DEINT_MEDIUM | DEINT_HIGH;
    return Option & quality;
}
209 changes: 128 additions & 81 deletions mythtv/libs/libmythtv/mythframe.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,11 @@ static inline int format_is_hw(VideoFrameType Type)
(Type == FMT_NVDEC);
}

/// True for hardware frame types listed here as YUV backed (NVDEC, VideoToolbox)
static inline int format_is_hwyuv(VideoFrameType Type)
{
    switch (Type)
    {
        case FMT_NVDEC:
        case FMT_VTB:
            return 1;
        default:
            return 0;
    }
}

static inline int format_is_yuv(VideoFrameType Type)
{
return (Type == FMT_YV12) || (Type == FMT_YUV422P) ||
Expand All @@ -64,6 +69,33 @@ static inline int format_is_yuv(VideoFrameType Type)
(Type == FMT_P010) || (Type == FMT_P016);
}

/// True for planar 4:2:0 formats at any bit depth
static inline int format_is_420(VideoFrameType Type)
{
    switch (Type)
    {
        case FMT_YV12:
        case FMT_YUV420P10:
        case FMT_YUV420P12:
        case FMT_YUV420P16:
            return 1;
        default:
            return 0;
    }
}

/// True for bi-planar (NV12-like) formats at any bit depth
static inline int format_is_nv12(VideoFrameType Type)
{
    switch (Type)
    {
        case FMT_NV12:
        case FMT_P010:
        case FMT_P016:
            return 1;
        default:
            return 0;
    }
}

// Deinterlacer preferences are a bitmask: the low nibble selects the quality
// and the next nibble selects where it runs (CPU, shader or driver)
typedef enum MythDeintType
{
DEINT_NONE = 0x0000,
DEINT_BASIC = 0x0001, // simple onefield/bob
DEINT_MEDIUM = 0x0002, // e.g. libavfilter yadif when run on the CPU
DEINT_HIGH = 0x0004, // e.g. libavfilter bwdif when run on the CPU
DEINT_CPU = 0x0010,
DEINT_SHADER = 0x0020,
DEINT_DRIVER = 0x0040,
DEINT_ALL = 0xFFFF
} MythDeintType;

// Allow MythDeintType values to be combined and masked as flags
inline MythDeintType operator| (MythDeintType a, MythDeintType b) { return static_cast<MythDeintType>(static_cast<int>(a) | static_cast<int>(b)); }
inline MythDeintType operator& (MythDeintType a, MythDeintType b) { return static_cast<MythDeintType>(static_cast<int>(a) & static_cast<int>(b)); }
inline MythDeintType operator~ (MythDeintType a) { return static_cast<MythDeintType>(~(static_cast<int>(a))); }

typedef struct VideoFrame_
{
VideoFrameType codec;
Expand All @@ -75,29 +107,29 @@ typedef struct VideoFrame_
double frame_rate;
int bpp;
int size;

long long frameNumber;
long long timecode;
int64_t disp_timecode;

unsigned char *priv[4]; ///< random empty storage

unsigned char *qscale_table;
int qstride;

int interlaced_frame; ///< 1 if interlaced.
int top_field_first; ///< 1 if top field is first.
int interlaced_reversed; /// 1 for user override of scan
int repeat_pict;
int forcekey; ///< hardware encoded .nuv
int dummy;

int pitches[3]; ///< Y, U, & V pitches
int offsets[3]; ///< Y, U, & V offsets

int pix_fmt;
int sw_pix_fmt;
int directrendering; ///< 1 if managed by FFmpeg
int colorspace;
int colorrange;
int colorprimaries;
int colortransfer;
int chromalocation;
MythDeintType deinterlace_single;
MythDeintType deinterlace_double;
MythDeintType deinterlace_allowed;
} VideoFrame;

#ifdef __cplusplus
Expand All @@ -108,6 +140,10 @@ int MTV_PUBLIC ColorDepth(int Format);

#ifdef __cplusplus

MythDeintType MTV_PUBLIC GetSingleRateOption(const VideoFrame* Frame, MythDeintType Type);
MythDeintType MTV_PUBLIC GetDoubleRateOption(const VideoFrame* Frame, MythDeintType Type);
MythDeintType MTV_PUBLIC GetDeinterlacer(MythDeintType Option);

enum class uswcState {
Detect,
Use_SSE,
Expand Down Expand Up @@ -143,9 +179,9 @@ static inline void init(VideoFrame *vf, VideoFrameType _codec,
float _aspect = -1.0F, double _rate = -1.0F,
int _aligned = 64);
static inline void clear(VideoFrame *vf);
static inline bool compatible(const VideoFrame *a,
const VideoFrame *b);
static inline int bitsperpixel(VideoFrameType type);
static inline int pitch_for_plane(VideoFrameType Type, int Width, uint Plane);
static inline int height_for_plane(VideoFrameType Type, int Height, uint Plane);

static inline void init(VideoFrame *vf, VideoFrameType _codec,
unsigned char *_buf, int _width, int _height,
Expand All @@ -159,15 +195,11 @@ static inline void init(VideoFrame *vf, VideoFrameType _codec,
vf->height = _height;
vf->aspect = _aspect;
vf->frame_rate = _rate;

vf->size = _size;
vf->frameNumber = 0;
vf->timecode = 0;

vf->qscale_table = nullptr;
vf->qstride = 0;

vf->interlaced_frame = 1;
vf->interlaced_reversed = 0;
vf->top_field_first = 1;
vf->repeat_pict = 0;
vf->forcekey = 0;
Expand All @@ -176,10 +208,17 @@ static inline void init(VideoFrame *vf, VideoFrameType _codec,
vf->sw_pix_fmt = -1; // AV_PIX_FMT_NONE
vf->directrendering = 1;
vf->colorspace = 1; // BT.709
vf->colorrange = 1; // normal/mpeg
vf->colorprimaries = 1; // BT.709
vf->colortransfer = 1; // BT.709
vf->chromalocation = 1; // default 4:2:0
vf->deinterlace_single = DEINT_NONE;
vf->deinterlace_double = DEINT_NONE;
vf->deinterlace_allowed = DEINT_NONE;

memset(vf->priv, 0, 4 * sizeof(unsigned char *));

uint width_aligned;
int width_aligned;
if (!_aligned)
{
width_aligned = _width;
Expand All @@ -195,34 +234,8 @@ static inline void init(VideoFrame *vf, VideoFrameType _codec,
}
else
{
if (FMT_YV12 == _codec || FMT_YUV422P == _codec)
{
vf->pitches[0] = width_aligned;
vf->pitches[1] = vf->pitches[2] = (width_aligned+1) >> 1;
}
else if (FMT_YUV420P10 == _codec || FMT_YUV420P12 == _codec ||
FMT_YUV420P16 == _codec)
{
vf->pitches[0] = width_aligned << 1;
vf->pitches[1] = vf->pitches[2] = width_aligned;
}
else if (FMT_NV12 == _codec)
{
vf->pitches[0] = width_aligned;
vf->pitches[1] = width_aligned;
vf->pitches[2] = 0;
}
else if (FMT_P010 == _codec || FMT_P016 == _codec)
{
vf->pitches[0] = width_aligned << 1;
vf->pitches[1] = width_aligned << 1;
vf->pitches[2] = 0;
}
else
{
vf->pitches[0] = (width_aligned * vf->bpp) >> 3;
vf->pitches[1] = vf->pitches[2] = 0;
}
for (int i = 0; i < 3; ++i)
vf->pitches[i] = pitch_for_plane(_codec, width_aligned, i);
}

if (o)
Expand All @@ -231,46 +244,106 @@ static inline void init(VideoFrame *vf, VideoFrameType _codec,
}
else
{
vf->offsets[0] = 0;
if (FMT_YV12 == _codec)
{
vf->offsets[0] = 0;
vf->offsets[1] = width_aligned * _height;
vf->offsets[2] =
vf->offsets[1] + ((width_aligned+1) >> 1) * ((_height+1) >> 1);
vf->offsets[2] = vf->offsets[1] + ((width_aligned + 1) >> 1) * ((_height+1) >> 1);
}
else if (FMT_YUV420P10 == _codec || FMT_YUV420P12 == _codec ||
FMT_YUV420P16 == _codec)
{
vf->offsets[0] = 0;
vf->offsets[1] = (width_aligned << 1) * _height;
vf->offsets[2] = vf->offsets[1] + (width_aligned * (_height >> 1));
}
else if (FMT_YUV422P == _codec)
{
vf->offsets[0] = 0;
vf->offsets[1] = width_aligned * _height;
vf->offsets[2] =
vf->offsets[1] + ((width_aligned+1) >> 1) * _height;
vf->offsets[2] = vf->offsets[1] + ((width_aligned + 1) >> 1) * _height;
}
else if (FMT_NV12 == _codec)
{
vf->offsets[0] = 0;
vf->offsets[1] = width_aligned * _height;
vf->offsets[2] = 0;
}
else if (FMT_P010 == _codec || FMT_P016 == _codec)
{
vf->offsets[0] = 0;
vf->offsets[1] = (width_aligned << 1) * _height;
vf->offsets[2] = 0;
}
else
{
vf->offsets[0] = vf->offsets[1] = vf->offsets[2] = 0;
vf->offsets[1] = vf->offsets[2] = 0;
}
}
}

/// Return the byte pitch of the given plane for a frame of the given type and
/// width, or 0 for planes the format does not have (and for hardware formats).
static inline int pitch_for_plane(VideoFrameType Type, int Width, uint Plane)
{
    // Planar 8-bit: full width luma, half width chroma
    if ((Type == FMT_YV12) || (Type == FMT_YUV422P))
        return (Plane == 0) ? Width : ((Plane < 3) ? ((Width + 1) >> 1) : 0);

    // Planar high bit depth: two bytes per sample
    if ((Type == FMT_YUV420P10) || (Type == FMT_YUV420P12) || (Type == FMT_YUV420P16))
        return (Plane == 0) ? (Width << 1) : ((Plane < 3) ? Width : 0);

    // Bi-planar: interleaved UV plane shares the luma pitch
    if (Type == FMT_NV12)
        return (Plane < 2) ? Width : 0;
    if ((Type == FMT_P010) || (Type == FMT_P016))
        return (Plane < 2) ? (Width << 1) : 0;

    // Packed formats: a single plane sized from bits per pixel
    bool packed = (Type == FMT_YUY2)   || (Type == FMT_YUYVHQ) ||
                  (Type == FMT_RGB24)  || (Type == FMT_ARGB32) ||
                  (Type == FMT_RGBA32) || (Type == FMT_BGRA)   ||
                  (Type == FMT_RGB32);
    if (packed && (Plane == 0))
        return (bitsperpixel(Type) * Width) >> 3;

    // None and hardware formats
    return 0;
}

/// Return the height in lines of the given plane for a frame of the given
/// type, or 0 for planes the format does not have (and for hardware formats).
static inline int height_for_plane(VideoFrameType Type, int Height, uint Plane)
{
    // Planar formats: full height luma, half height chroma
    bool planar = (Type == FMT_YV12)       || (Type == FMT_YUV422P)    ||
                  (Type == FMT_YUV420P10)  || (Type == FMT_YUV420P12)  ||
                  (Type == FMT_YUV420P16);
    if (planar)
        return (Plane == 0) ? Height : ((Plane < 3) ? (Height >> 1) : 0);

    // Bi-planar formats: both planes are full height
    bool biplanar = (Type == FMT_NV12) || (Type == FMT_P010) || (Type == FMT_P016);
    if (biplanar)
        return (Plane < 2) ? Height : 0;

    // Packed formats: single full height plane
    bool packed = (Type == FMT_YUY2)   || (Type == FMT_YUYVHQ) ||
                  (Type == FMT_RGB24)  || (Type == FMT_ARGB32) ||
                  (Type == FMT_RGBA32) || (Type == FMT_BGRA)   ||
                  (Type == FMT_RGB32);
    if (packed && (Plane == 0))
        return Height;

    // None and hardware formats
    return 0;
}
static inline void clear(VideoFrame *vf)
{
if (!vf)
Expand Down Expand Up @@ -335,36 +408,12 @@ static inline void copyplane(uint8_t* dst, int dst_pitch,
{
for (int y = 0; y < height; y++)
{
memcpy(dst, src, width);
memcpy(dst, src, static_cast<size_t>(width));
src += src_pitch;
dst += dst_pitch;
}
}

static inline bool compatible(const VideoFrame *a, const VideoFrame *b)
{
if (a && b && a->codec == b->codec &&
(a->codec == FMT_YV12 || a->codec == FMT_NV12))
{
return (a->codec == b->codec) &&
(a->width == b->width) &&
(a->height == b->height) &&
(a->size == b->size);
}

return a && b &&
(a->codec == b->codec) &&
(a->width == b->width) &&
(a->height == b->height) &&
(a->size == b->size) &&
(a->offsets[0] == b->offsets[0]) &&
(a->offsets[1] == b->offsets[1]) &&
(a->offsets[2] == b->offsets[2]) &&
(a->pitches[0] == b->pitches[0]) &&
(a->pitches[1] == b->pitches[1]) &&
(a->pitches[2] == b->pitches[2]);
}

/**
* copy: copy one frame into another
* copy only works with the following assumptions:
Expand Down Expand Up @@ -498,9 +547,7 @@ static inline void copybuffer(uint8_t *dstbuffer, const VideoFrame *src,
int pitch = 0, VideoFrameType type = FMT_YV12)
{
if (pitch == 0)
{
pitch = src->width;
}

if (type == FMT_YV12)
{
Expand Down
229 changes: 66 additions & 163 deletions mythtv/libs/libmythtv/mythplayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -164,8 +164,7 @@ MythPlayer::MythPlayer(PlayerFlags flags)
needNewPauseFrame(false),
bufferPaused(false), videoPaused(false),
allpaused(false), playing(false),
m_double_framerate(false), m_double_process(false),
m_deint_possible(true),
m_double_framerate(false),
livetv(false),
watchingrecording(false),
transcoding(false),
Expand Down Expand Up @@ -589,9 +588,8 @@ void MythPlayer::ReinitVideo(void)
{
MythMultiLocker locker({&osdLock, &vidExitLock});

videoOutput->SetVideoFrameRate(video_frame_rate);
float aspect = (forced_video_aspect > 0) ? forced_video_aspect :
video_aspect;
videoOutput->SetVideoFrameRate(static_cast<float>(video_frame_rate));
float aspect = (forced_video_aspect > 0) ? forced_video_aspect : video_aspect;
if (!videoOutput->InputChanged(video_dim, video_disp_dim, aspect,
decoder->GetVideoCodecID(), aspect_only, &locker))
{
Expand Down Expand Up @@ -671,23 +669,6 @@ void MythPlayer::SetKeyframeDistance(int keyframedistance)
keyframedist = (keyframedistance > 0) ? keyframedistance : keyframedist;
}

/** \fn MythPlayer::FallbackDeint(void)
* \brief Fallback to non-frame-rate-doubling deinterlacing method.
*/
void MythPlayer::FallbackDeint(void)
{
m_double_framerate = false;
m_double_process = false;

if (videoOutput)
{
videoOutput->SetupDeinterlace(false);
bool hwset = decoder->GetMythCodecContext()->FallbackDeint();
if (!hwset)
videoOutput->FallbackDeint();
}
}

void MythPlayer::AutoDeint(VideoFrame *frame, bool allow_lock)
{
if (!frame || m_scan_locked)
Expand Down Expand Up @@ -761,56 +742,24 @@ void MythPlayer::SetScanType(FrameScanType scan)

resetScan = kScan_Ignore;

if (m_scan_initialized &&
m_scan == scan &&
m_frame_interval == frame_interval)
if (m_scan_initialized && m_scan == scan && m_frame_interval == frame_interval)
return;

m_scan_locked = (scan != kScan_Detect);

m_scan_initialized = true;
m_frame_interval = frame_interval;

bool interlaced = is_interlaced(scan);

if (interlaced && !m_deint_possible)
if (is_interlaced(scan))
{
m_scan = scan;
return;
}

if (interlaced)
{
m_deint_possible = videoOutput->SetDeinterlacingEnabled(true);
if (!m_deint_possible)
{
LOG(VB_GENERAL, LOG_INFO, LOC + "Unable to enable Video Output based deinterlacing");
m_scan = scan;
return;
}
if (videoOutput->NeedsDoubleFramerate())
{
m_double_framerate = true;
if (!CanSupportDoubleRate())
{
LOG(VB_GENERAL, LOG_ERR, LOC +
"Video sync method can't support double framerate "
"(refresh rate too low for 2x deint)");
FallbackDeint();
}
}
m_double_process = videoOutput->IsExtraProcessingRequired();
LOG(VB_PLAYBACK, LOG_INFO, LOC + "Enabled Video Output based deinterlacing");
bool normal = play_speed > 0.99F && play_speed < 1.01F && normal_speed;
m_double_framerate = CanSupportDoubleRate() && normal;
videoOutput->SetDeinterlacing(true, m_double_framerate);
}
else
else if (kScan_Progressive == scan)
{
if (kScan_Progressive == scan)
{
m_double_process = false;
m_double_framerate = false;
videoOutput->SetDeinterlacingEnabled(false);
LOG(VB_PLAYBACK, LOG_INFO, LOC + "Disabled Video Output based deinterlacing");
}
m_double_framerate = false;
videoOutput->SetDeinterlacing(false, false);
}

m_scan = scan;
Expand Down Expand Up @@ -849,7 +798,7 @@ void MythPlayer::SetVideoParams(int width, int height, double fps,
if (!codecName.isEmpty())
{
m_codecName = codecName;
paramsChanged = true;
paramsChanged = true;
}

if (!paramsChanged)
Expand All @@ -861,7 +810,9 @@ void MythPlayer::SetVideoParams(int width, int height, double fps,
if (IsErrored())
return;

SetScanType(detectInterlace(scan, m_scan, video_frame_rate,
// ensure deinterlacers are correctly reset after a change
m_scan_initialized = false;
SetScanType(detectInterlace(scan, m_scan, static_cast<float>(video_frame_rate),
video_disp_dim.height()));
m_scan_locked = false;
m_scan_tracker = (m_scan == kScan_Interlaced) ? 2 : 0;
Expand Down Expand Up @@ -1848,8 +1799,11 @@ void MythPlayer::AVSync(VideoFrame *buffer, bool limit_delay)
}

if (avsync_next > 0)
{
avsync_next--;
else {
}
else
{
int divisor = int(abs(diverge) - max_diverge - 1.0F);
if (divisor < 1)
divisor=1;
Expand All @@ -1858,7 +1812,14 @@ void MythPlayer::AVSync(VideoFrame *buffer, bool limit_delay)

FrameScanType ps = m_scan;
if (kScan_Detect == m_scan || kScan_Ignore == m_scan)
{
ps = kScan_Progressive;
}
else if (buffer && is_interlaced(ps))
{
ps = kScan_Interlaced;
buffer->interlaced_reversed = m_scan == kScan_Intr2ndField;
}

bool max_video_behind = diverge < -max_diverge;
bool dropframe = false;
Expand Down Expand Up @@ -1952,19 +1913,22 @@ void MythPlayer::AVSync(VideoFrame *buffer, bool limit_delay)

if (m_double_framerate)
{
//second stage of deinterlacer processing
ps = (kScan_Intr2ndField == ps) ?
kScan_Interlaced : kScan_Intr2ndField;
// second stage of deinterlacer processing
if (ps == kScan_Interlaced)
ps = kScan_Intr2ndField;
osdLock.lock();
if (m_double_process && ps != kScan_Progressive)
videoOutput->ProcessFrame(buffer, osd, pip_players, ps);
if (ps != kScan_Progressive)
{
// Only double rate CPU deinterlacers require an extra call to ProcessFrame
if (GetDoubleRateOption(buffer, DEINT_CPU) && !GetDoubleRateOption(buffer, DEINT_SHADER))
videoOutput->ProcessFrame(buffer, osd, pip_players, ps);
}
videoOutput->PrepareFrame(buffer, ps, osd);
osdLock.unlock();
// Display the second field
if (!player_ctx->IsPBP() || player_ctx->IsPrimaryPBP())
{
LOG(VB_PLAYBACK | VB_TIMESTAMP, LOG_INFO,
LOC + QString("AVSync waitforframe %1 %2 %3")
LOG(VB_PLAYBACK | VB_TIMESTAMP, LOG_INFO, LOC + QString("AVSync waitforframe %1 %2 %3")
.arg(frameDelay).arg(avsync_adjustment).arg(m_double_framerate));
vsync_delay_clock = videosync->WaitForFrame(frameDelay, avsync_adjustment);
}
Expand Down Expand Up @@ -2237,7 +2201,14 @@ void MythPlayer::AVSync2(VideoFrame *buffer)

FrameScanType ps = m_scan;
if (kScan_Detect == m_scan || kScan_Ignore == m_scan)
{
ps = kScan_Progressive;
}
else if (buffer && is_interlaced(ps))
{
ps = kScan_Interlaced;
buffer->interlaced_reversed = m_scan == kScan_Intr2ndField;
}

if (buffer && !dropframe)
{
Expand Down Expand Up @@ -2289,11 +2260,15 @@ void MythPlayer::AVSync2(VideoFrame *buffer)
if (m_double_framerate)
{
//second stage of deinterlacer processing
ps = (kScan_Intr2ndField == ps) ?
kScan_Interlaced : kScan_Intr2ndField;
if (kScan_Interlaced == ps)
ps = kScan_Intr2ndField;
osdLock.lock();
if (m_double_process && ps != kScan_Progressive)
videoOutput->ProcessFrame(buffer, osd, pip_players, ps);
if (ps != kScan_Progressive)
{
// Only double rate CPU deinterlacers require an extra call to ProcessFrame
if (GetDoubleRateOption(buffer, DEINT_CPU) && !GetDoubleRateOption(buffer, DEINT_SHADER))
videoOutput->ProcessFrame(buffer, osd, pip_players, ps);
}
videoOutput->PrepareFrame(buffer, ps, osd);
osdLock.unlock();
// Display the second field
Expand Down Expand Up @@ -2598,36 +2573,18 @@ void MythPlayer::EnableFrameRateMonitor(bool enable)
{
if (!output_jmeter)
return;
int rate = enable ? video_frame_rate :
VERBOSE_LEVEL_CHECK(VB_PLAYBACK, LOG_ANY) ?
(video_frame_rate * 4) : 0;
output_jmeter->SetNumCycles(rate);
bool verbose = VERBOSE_LEVEL_CHECK(VB_PLAYBACK, LOG_ANY);
double rate = enable ? video_frame_rate : verbose ? (video_frame_rate * 4) : 0.0;
output_jmeter->SetNumCycles(static_cast<int>(rate));
}

void MythPlayer::ForceDeinterlacer(const QString &overridefilter)
{
if (!videoOutput)
return;

bool normal = play_speed > 0.99F && play_speed < 1.01F && normal_speed;
bool hwset = decoder->GetMythCodecContext()->setDeinterlacer(true, overridefilter);
if (hwset)
{
m_double_framerate = false;
m_double_process = false;
videoOutput->SetupDeinterlace(false);
}
else
{
m_double_framerate =
videoOutput->SetupDeinterlace(true, overridefilter) &&
videoOutput->NeedsDoubleFramerate();
m_double_process = videoOutput->IsExtraProcessingRequired();
}
if ((decoder->GetMythCodecContext()->getDoubleRate() || m_double_framerate)
&& (!CanSupportDoubleRate() || !normal))
FallbackDeint();

(void)overridefilter;
LOG(VB_GENERAL, LOG_ERR, LOC + "ForceDeinterlacer not yet re-implemented");
}

void MythPlayer::VideoStart(void)
Expand Down Expand Up @@ -2717,36 +2674,13 @@ void MythPlayer::VideoStart(void)
}
else if (videoOutput)
{
bool hwset = decoder->GetMythCodecContext()->setDeinterlacer(true);
if (hwset)
videoOutput->SetupDeinterlace(false);
else
{
// Set up deinterlacing in the video output method
m_double_framerate =
(videoOutput->SetupDeinterlace(true) &&
videoOutput->NeedsDoubleFramerate());

m_double_process = videoOutput->IsExtraProcessingRequired();
}
videosync = VideoSync::BestMethod(videoOutput, (uint)rf_int);

// Make sure video sync can do it
if (videosync != nullptr && m_double_framerate)
{
if (!CanSupportDoubleRate())
{
LOG(VB_GENERAL, LOG_ERR, LOC +
"Video sync method can't support double framerate "
"(refresh rate too low for 2x deint)");
FallbackDeint();
}
}
m_double_framerate = CanSupportDoubleRate();
videoOutput->SetDeinterlacing(true, m_double_framerate);
videosync = VideoSync::BestMethod(videoOutput, static_cast<uint>(rf_int));
}

if (!videosync)
{
videosync = new BusyWaitVideoSync(videoOutput, rf_int);
}

InitAVSync();
videosync->Start();
Expand Down Expand Up @@ -4090,38 +4024,11 @@ void MythPlayer::ChangeSpeed(void)

LOG(VB_PLAYBACK, LOG_INFO, LOC + "Play speed: " +
QString("rate: %1 speed: %2 skip: %3 => new interval %4")
.arg(video_frame_rate).arg(play_speed)
.arg(video_frame_rate).arg(static_cast<double>(play_speed))
.arg(ffrew_skip).arg(frame_interval));

if (videoOutput && videosync)
{
// We need to tell it this for automatic deinterlacer settings
videoOutput->SetVideoFrameRate(video_frame_rate);

// If using bob deinterlace, turn on or off if we
// changed to or from synchronous playback speed.
bool play_1 = play_speed > 0.99F && play_speed < 1.01F && normal_speed;
bool inter = (kScan_Interlaced == m_scan ||
kScan_Intr2ndField == m_scan);

bool doublerate = m_double_framerate || decoder->GetMythCodecContext()->getDoubleRate();
if (doublerate && !play_1)
{
bool hwdeint = decoder->GetMythCodecContext()->FallbackDeint();
if (!hwdeint)
videoOutput->FallbackDeint();
}
else if (!m_double_framerate && CanSupportDoubleRate() && play_1
&& (inter || decoder->GetMythCodecContext()->isDeinterlacing()))
{
videoOutput->SetupDeinterlace(false);
bool hwdeint = decoder->GetMythCodecContext()->BestDeint();
if (!hwdeint)
videoOutput->BestDeint();
}
m_double_framerate = videoOutput->NeedsDoubleFramerate();
m_double_process = videoOutput->IsExtraProcessingRequired();
}
videoOutput->SetVideoFrameRate(static_cast<float>(video_frame_rate));

if (normal_speed && audio.HasAudioOut())
{
Expand Down Expand Up @@ -5271,24 +5178,20 @@ int MythPlayer::GetStatusbarPos(void) const

void MythPlayer::GetPlaybackData(InfoMap &infoMap)
{
QString samplerate = RingBuffer::BitrateToString(audio.GetSampleRate(),
true);
QString samplerate = RingBuffer::BitrateToString(audio.GetSampleRate(), true);
infoMap.insert("samplerate", samplerate);
infoMap.insert("filename", player_ctx->m_buffer->GetSafeFilename());
infoMap.insert("decoderrate", player_ctx->m_buffer->GetDecoderRate());
infoMap.insert("storagerate", player_ctx->m_buffer->GetStorageRate());
infoMap.insert("bufferavail", player_ctx->m_buffer->GetAvailableBuffer());
infoMap.insert("buffersize",
QString::number(player_ctx->m_buffer->GetBufferSize() >> 20));
infoMap.insert("buffersize", QString::number(player_ctx->m_buffer->GetBufferSize() >> 20));
if (gCoreContext->GetBoolSetting("PlaybackAVSync2", false))
{
int avsync = avsync_avg / 1000;
infoMap.insert("avsync",
tr("%1 ms").arg(avsync));
infoMap.insert("avsync", tr("%1 ms").arg(avsync));
}
else
infoMap.insert("avsync",
QString::number((float)avsync_avg / (float)frame_interval, 'f', 2));
infoMap.insert("avsync", QString::number((float)avsync_avg / (float)frame_interval, 'f', 2));
if (videoOutput)
{
QString frames = QString("%1/%2").arg(videoOutput->ValidVideoFrames())
Expand Down
3 changes: 0 additions & 3 deletions mythtv/libs/libmythtv/mythplayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -638,7 +638,6 @@ class MTV_PUBLIC MythPlayer
void ResetAVSync(void);
int64_t AVSyncGetAudiotime(void);
void SetFrameInterval(FrameScanType scan, double frame_period);
void FallbackDeint(void);
void WaitForTime(int64_t framedue);

// Private LiveTV stuff
Expand Down Expand Up @@ -690,8 +689,6 @@ class MTV_PUBLIC MythPlayer
mutable QMutex vidExitLock;
mutable QMutex playingLock;
bool m_double_framerate;///< Output fps is double Video (input) rate
bool m_double_process;///< Output filter must processed at double rate
bool m_deint_possible;
bool livetv;
bool watchingrecording;
bool transcoding;
Expand Down
55 changes: 37 additions & 18 deletions mythtv/libs/libmythtv/mythvaapiinterop.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -149,11 +149,11 @@ VASurfaceID MythVAAPIInterop::VerifySurface(MythRenderOpenGL *Context, VideoFram

/*! \brief GLX interop constructor.
 *
 * Initialises picture attribute state and marks deinterlacing as disabled;
 * m_deinterlacer tracks the VAAPI deinterlacer currently in use and is only
 * set once the first interlaced frame is processed.
 *
 * \note The pasted original contained both the removed m_lastSurface(0)
 * initialiser (the member is deleted in this change) and a duplicated
 * m_vaapiColourSpace(0) line - neither would compile.
 */
MythVAAPIInteropGLX::MythVAAPIInteropGLX(MythRenderOpenGL *Context, Type InteropType)
  : MythVAAPIInterop(Context, InteropType),
    m_vaapiPictureAttributes(nullptr),
    m_vaapiPictureAttributeCount(0),
    m_vaapiHueBase(0),
    m_vaapiColourSpace(0),
    m_deinterlacer(DEINT_NONE)
{
}

Expand All @@ -169,10 +169,39 @@ uint MythVAAPIInteropGLX::GetFlagsForFrame(VideoFrame *Frame, FrameScanType Scan
return flags;

// Set deinterlacing
if (Scan == kScan_Interlaced)
flags = Frame->top_field_first ? VA_TOP_FIELD : VA_BOTTOM_FIELD;
else if (Scan == kScan_Intr2ndField)
flags = Frame->top_field_first ? VA_BOTTOM_FIELD : VA_TOP_FIELD;
if (is_interlaced(Scan))
{
// As for VDPAU, only VAAPI can deinterlace these frames - so accept any deinterlacer
bool doublerate = true;
MythDeintType driverdeint = GetDoubleRateOption(Frame, DEINT_DRIVER | DEINT_CPU | DEINT_SHADER);
if (!driverdeint)
{
doublerate = false;
driverdeint = GetSingleRateOption(Frame, DEINT_DRIVER | DEINT_CPU | DEINT_SHADER);
}

if (driverdeint)
{
driverdeint = DEINT_BASIC;
if (m_deinterlacer != driverdeint)
{
LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Enabled deinterlacer '%1'")
.arg(DeinterlacerName(driverdeint | DEINT_DRIVER, doublerate, FMT_VAAPI)));
}

bool top = Frame->interlaced_reversed ? !Frame->top_field_first : Frame->top_field_first;
if (Scan == kScan_Interlaced)
flags = top ? VA_TOP_FIELD : VA_BOTTOM_FIELD;
else if (Scan == kScan_Intr2ndField)
flags = top ? VA_BOTTOM_FIELD : VA_TOP_FIELD;
m_deinterlacer = driverdeint;
}
else if (m_deinterlacer)
{
LOG(VB_PLAYBACK, LOG_INFO, LOC + "Disabled basic VAAPI deinterlacer");
m_deinterlacer = DEINT_NONE;
}
}

// Update colourspace
if (!m_vaapiColourSpace)
Expand Down Expand Up @@ -267,6 +296,7 @@ void MythVAAPIInteropGLX::InitPictureAttributes(VideoColourSpace *ColourSpace)
if (updatecscmatrix > -1)
{
// FIXME - this is untested. Presumably available with the VDPAU backend.
// UPDATE - not implemented in VDPAU backend. Looks like maybe Android only??
// If functioning correctly we need to turn off all of the other VA picture attributes
// as this acts, as for OpenGL, as the master colourspace conversion matrix.
// We can also enable Studio levels support.
Expand Down Expand Up @@ -432,15 +462,10 @@ vector<MythVideoTexture*> MythVAAPIInteropGLXCopy::Acquire(MythRenderOpenGL *Con
return result;
result = m_openglTextures[DUMMY_INTEROP_ID];

// Pause frame - no need to update the same frame
if (m_lastSurface == id)
return result;

// Copy surface to texture
INIT_ST;
va_status = vaCopySurfaceGLX(m_vaDisplay, m_glxSurface, id, GetFlagsForFrame(Frame, Scan));
CHECK_ST;
m_lastSurface = id;
return result;
}

Expand Down Expand Up @@ -574,10 +599,6 @@ vector<MythVideoTexture*> MythVAAPIInteropGLXPixmap::Acquire(MythRenderOpenGL *C
return result;
result = m_openglTextures[DUMMY_INTEROP_ID];

// Pause frame - no need to update the same frame
if (m_lastSurface == id)
return result;

// Copy the surface to the texture
INIT_ST;
va_status = vaSyncSurface(m_vaDisplay, id);
Expand All @@ -597,7 +618,6 @@ vector<MythVideoTexture*> MythVAAPIInteropGLXPixmap::Acquire(MythRenderOpenGL *C
m_glxBindTexImageEXT(glxdisplay, m_glxPixmap, GLX_FRONT_EXT, nullptr);
m_context->glBindTexture(QOpenGLTexture::Target2D, 0);
}
m_lastSurface = id;
return result;
}

Expand Down Expand Up @@ -758,8 +778,7 @@ vector<MythVideoTexture*> MythVAAPIInteropDRM::Acquire(MythRenderOpenGL *Context
sizes.push_back(size);
}

vector<MythVideoTexture*> textures =
MythVideoTexture::CreateTextures(m_context, FMT_VAAPI, format, sizes);
vector<MythVideoTexture*> textures = MythVideoTexture::CreateTextures(m_context, FMT_VAAPI, format, sizes);
if (textures.size() != count)
{
LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create all textures");
Expand Down
2 changes: 1 addition & 1 deletion mythtv/libs/libmythtv/mythvaapiinterop.h
Original file line number Diff line number Diff line change
Expand Up @@ -58,11 +58,11 @@ class MythVAAPIInteropGLX : public MythVAAPIInterop
void InitPictureAttributes(VideoColourSpace *ColourSpace);

protected:
VASurfaceID m_lastSurface;
VADisplayAttribute *m_vaapiPictureAttributes;
int m_vaapiPictureAttributeCount;
int m_vaapiHueBase;
uint m_vaapiColourSpace;
MythDeintType m_deinterlacer;
};

class MythVAAPIInteropGLXCopy : public MythVAAPIInteropGLX
Expand Down
50 changes: 46 additions & 4 deletions mythtv/libs/libmythtv/mythvdpauhelper.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,8 @@ void MythVDPAUHelper::DeleteOutputSurface(VdpOutputSurface Surface)
CHECK_ST
}

/*! \brief Create a VDPAU video mixer.
 *
 * \param Size         Video surface dimensions (must be non-empty).
 * \param ChromaType   VDPAU chroma type (defaults to 4:2:0).
 * \param Deinterlacer Requested deinterlacer quality. DEINT_MEDIUM enables the
 *                     temporal deinterlacer; DEINT_HIGH enables temporal AND
 *                     temporal/spatial.
 * \return A valid mixer handle, or 0 on failure.
 */
VdpVideoMixer MythVDPAUHelper::CreateMixer(QSize Size, VdpChromaType ChromaType,
                                           MythDeintType Deinterlacer)
{
    if (!m_valid || Size.isEmpty())
        return 0;

    VdpVideoMixer result = 0;

    VdpVideoMixerParameter parameters[] = {
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
        VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
        VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE,
        VDP_VIDEO_MIXER_PARAMETER_LAYERS
    };

    uint width  = static_cast<uint>(Size.width());
    uint height = static_cast<uint>(Size.height());
    uint layers = 0;
    void const * parametervalues[] = { &width, &height, &ChromaType, &layers};

    // Request the deinterlacing features needed for the given quality.
    // NOTE: the original added DEINTERLACE_TEMPORAL twice for DEINT_HIGH;
    // high quality must add the TEMPORAL_SPATIAL feature as well.
    uint32_t featurecount = 0;
    VdpVideoMixerFeature features[2];
    const VdpBool enables[2] = { VDP_TRUE, VDP_TRUE };

    if (DEINT_MEDIUM == Deinterlacer || DEINT_HIGH == Deinterlacer)
        features[featurecount++] = VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL;

    if (DEINT_HIGH == Deinterlacer)
        features[featurecount++] = VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL;

    INIT_ST
    status = m_vdpVideoMixerCreate(m_device, featurecount, featurecount ? features : nullptr,
                                   4, parameters, parametervalues, &result);
    CHECK_ST

    if (!ok || !result)
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create video mixer");
        return result;
    }

    // Features are created in the disabled state - enable those we requested.
    if (featurecount)
    {
        status = m_vdpVideoMixerSetFeatureEnables(result, featurecount, features, enables);
        CHECK_ST
    }
    return result;
}

void MythVDPAUHelper::MixerRender(VdpVideoMixer Mixer, VdpVideoSurface Source, VdpOutputSurface Dest)
void MythVDPAUHelper::MixerRender(VdpVideoMixer Mixer, VdpVideoSurface Source,
VdpOutputSurface Dest, FrameScanType Scan, int TopFieldFirst)
{
if (!m_valid || !Mixer || !Source || !Dest)
return;

VdpVideoMixerPictureStructure field = VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME;
if (kScan_Interlaced == Scan)
{
field = TopFieldFirst ? VDP_VIDEO_MIXER_PICTURE_STRUCTURE_TOP_FIELD :
VDP_VIDEO_MIXER_PICTURE_STRUCTURE_BOTTOM_FIELD;
}
else if (kScan_Intr2ndField == Scan)
{
field = TopFieldFirst ? VDP_VIDEO_MIXER_PICTURE_STRUCTURE_BOTTOM_FIELD :
VDP_VIDEO_MIXER_PICTURE_STRUCTURE_TOP_FIELD;
}

INIT_ST
status = m_vdpVideoMixerRender(Mixer, VDP_INVALID_HANDLE, nullptr, VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
status = m_vdpVideoMixerRender(Mixer, VDP_INVALID_HANDLE, nullptr, field,
0, nullptr, Source, 0, nullptr, nullptr, Dest, nullptr, nullptr, 0, nullptr);
CHECK_ST
}
Expand Down
20 changes: 18 additions & 2 deletions mythtv/libs/libmythtv/mythvdpauhelper.h
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@
// Qt
#include <QSize>
#include <QMutex>
#include <QFlags>

// MythTV
#include "mythframe.h"
#include "videoouttypes.h"

// FFmpeg
extern "C" {
Expand All @@ -18,6 +23,15 @@ class VideoColourSpace;
class MythVDPAUHelper
{
public:
enum VDPMixerFeature
{
VDPMixerNone = 0x00,
VDPMixerTemporal = 0x01,
VDPMixerSpatial = 0x02
};

Q_DECLARE_FLAGS(VDPMixerFeatures, VDPMixerFeature)

static bool HaveVDPAU(void);
static bool HaveMPEG4Decode(void);
static bool CheckH264Decode(AVCodecContext *Context);
Expand All @@ -29,8 +43,10 @@ class MythVDPAUHelper
bool IsValid(void);
bool IsFeatureAvailable(uint Feature);
VdpOutputSurface CreateOutputSurface(QSize Size);
VdpVideoMixer CreateMixer(QSize Size, VdpChromaType ChromaType = VDP_CHROMA_TYPE_420);
void MixerRender(VdpVideoMixer Mixer, VdpVideoSurface Source, VdpOutputSurface Dest);
VdpVideoMixer CreateMixer(QSize Size, VdpChromaType ChromaType = VDP_CHROMA_TYPE_420,
MythDeintType Deinterlacer = DEINT_BASIC);
void MixerRender(VdpVideoMixer Mixer, VdpVideoSurface Source, VdpOutputSurface Dest,
FrameScanType Scan, int TopFieldFirst);
void SetCSCMatrix(VdpVideoMixer Mixer, VideoColourSpace *ColourSpace);
void DeleteOutputSurface(VdpOutputSurface Surface);
void DeleteMixer(VdpVideoMixer Mixer);
Expand Down
51 changes: 44 additions & 7 deletions mythtv/libs/libmythtv/mythvdpauinterop.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,17 +89,26 @@ bool MythVDPAUInterop::InitNV(AVVDPAUDeviceContext* DeviceContext)
return false;
}

bool MythVDPAUInterop::InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSurface Surface)
bool MythVDPAUInterop::InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSurface Surface,
MythDeintType Deint, bool DoubleRate)
{
if (!m_helper || !m_context || !Surface || !DeviceContext)
return false;

if (!(m_mixer && m_outputSurface))
if (!m_mixer)
{
VdpChromaType chroma = VDP_CHROMA_TYPE_420;
QSize size = m_helper->GetSurfaceParameters(Surface, chroma);
m_mixer = m_helper->CreateMixer(size, chroma, Deint);
m_deinterlacer = Deint;
LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Setup deinterlacer '%1'")
.arg(DeinterlacerName(m_deinterlacer | DEINT_DRIVER, DoubleRate, FMT_VDPAU)));
}

m_mixer = m_helper->CreateMixer(size, chroma);
if (!m_outputSurface)
{
VdpChromaType chroma = VDP_CHROMA_TYPE_420;
QSize size = m_helper->GetSurfaceParameters(Surface, chroma);
m_outputSurface = m_helper->CreateOutputSurface(size);
if (m_outputSurface)
{
Expand Down Expand Up @@ -148,7 +157,7 @@ bool MythVDPAUInterop::InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSu
* \note There can only be one VDPAU context mapped to an OpenGL context. This causes
* a minor issue when seeking (usually with H.264) as the decoder is recreated but
* we still have a pause frame associated with the old decoder. Hence this interop is not
 * released and remains bound to the OpenGL context. This resolves itself once the pause
* released and remains bound to the OpenGL context. This resolves itself once the pause
* frame is replaced (i.e. after one new frame is displayed).
*
* \note We use a VdpVideoMixer to complete the conversion from YUV to RGB. Hence the returned
Expand All @@ -157,7 +166,7 @@ bool MythVDPAUInterop::InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSu
vector<MythVideoTexture*> MythVDPAUInterop::Acquire(MythRenderOpenGL *Context,
VideoColourSpace *ColourSpace,
VideoFrame *Frame,
FrameScanType)
FrameScanType Scan)
{
vector<MythVideoTexture*> result;
if (!Frame)
Expand Down Expand Up @@ -202,8 +211,34 @@ vector<MythVideoTexture*> MythVDPAUInterop::Acquire(MythRenderOpenGL *Context,
if (!surface)
return result;

// Check for deinterlacing - VDPAU deinterlacers trump all others as we can only
// deinterlace VDPAU frames here. So accept any deinterlacer.
// N.B. basic deinterlacing requires no additional setup and is managed with
// the field/frame parameter
bool doublerate = true;
MythDeintType deinterlacer = DEINT_BASIC;
if (kScan_Interlaced == Scan || kScan_Intr2ndField == Scan)
{
MythDeintType driverdeint = GetDoubleRateOption(Frame, DEINT_DRIVER | DEINT_CPU | DEINT_SHADER);
if (!driverdeint)
{
doublerate = false;
driverdeint = GetSingleRateOption(Frame, DEINT_DRIVER | DEINT_CPU | DEINT_SHADER);
}

if (driverdeint)
deinterlacer = driverdeint;

// destroy the current mixer if necessary
if (deinterlacer != m_deinterlacer)
{
m_helper->DeleteMixer(m_mixer);
m_mixer = 0;
}
}

// We need a mixer, an output surface and mapped texture
if (!InitVDPAU(devicecontext, surface))
if (!InitVDPAU(devicecontext, surface, deinterlacer, doublerate))
return result;

// Update colourspace and initialise on first frame - after mixer is created
Expand All @@ -230,7 +265,9 @@ vector<MythVideoTexture*> MythVDPAUInterop::Acquire(MythRenderOpenGL *Context,
}

// Render surface
m_helper->MixerRender(m_mixer, surface, m_outputSurface);
m_helper->MixerRender(m_mixer, surface, m_outputSurface, Scan,
Frame->interlaced_reversed ? !Frame->top_field_first :
Frame->top_field_first);
return m_openglTextures[DUMMY_INTEROP_ID];
}

Expand Down
4 changes: 3 additions & 1 deletion mythtv/libs/libmythtv/mythvdpauinterop.h
Original file line number Diff line number Diff line change
Expand Up @@ -40,13 +40,15 @@ class MythVDPAUInterop : public MythOpenGLInterop

private:
bool InitNV(AVVDPAUDeviceContext* DeviceContext);
bool InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSurface Surface);
bool InitVDPAU(AVVDPAUDeviceContext* DeviceContext, VdpVideoSurface Surface,
MythDeintType Deint, bool DoubleRate);

VideoColourSpace *m_colourSpace { nullptr };
MythVDPAUHelper *m_helper { nullptr };
VdpOutputSurface m_outputSurface { 0 };
MythVDPAUSurfaceNV m_outputSurfaceReg { 0 };
VdpVideoMixer m_mixer { 0 };
MythDeintType m_deinterlacer { DEINT_BASIC };
MYTH_VDPAUINITNV m_initNV { nullptr };
MYTH_VDPAUFININV m_finiNV { nullptr };
MYTH_VDPAUREGOUTSURFNV m_registerNV { nullptr };
Expand Down
174 changes: 94 additions & 80 deletions mythtv/libs/libmythtv/openglvideo.cpp

Large diffs are not rendered by default.

11 changes: 3 additions & 8 deletions mythtv/libs/libmythtv/openglvideo.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,6 @@ class OpenGLVideo : public QObject
void ProcessFrame(const VideoFrame *Frame);
void PrepareFrame(VideoFrame *Frame, bool TopFieldFirst, FrameScanType Scan,
StereoscopicMode Stereo, bool DrawBorder = false);
bool AddDeinterlacer(QString Deinterlacer);
void SetDeinterlacing(bool Deinterlacing);
QString GetDeinterlacer(void) const;
void SetMasterViewport(QSize Size);
QSize GetVideoSize(void) const;
QString GetProfile() const;
Expand All @@ -65,10 +62,10 @@ class OpenGLVideo : public QObject
private:
bool SetupFrameFormat(VideoFrameType InputType, VideoFrameType OutputType,
QSize Size, GLenum TextureTarget);
bool CreateVideoShader(VideoShaderType Type, QString Deinterlacer = QString());
bool CreateVideoShader(VideoShaderType Type, MythDeintType Deint = DEINT_NONE);
void LoadTextures(bool Deinterlacing, vector<MythVideoTexture*> &Current,
MythGLTexture** Textures, uint &TextureCount);
void TearDownDeinterlacer(void);
bool AddDeinterlacer(const VideoFrame *Frame, MythDeintType Filter = DEINT_SHADER);

bool m_valid;
QString m_profile;
Expand All @@ -81,9 +78,7 @@ class OpenGLVideo : public QObject
QRect m_displayVisibleRect; ///< Total useful, visible rectangle
QRect m_displayVideoRect; ///< Sub-rect of display_visible_rect for video
QRect m_videoRect; ///< Sub-rect of video_disp_dim to display (after zoom adjustments etc)
QString m_hardwareDeinterlacer;
QString m_queuedHardwareDeinterlacer; ///< Temporary prior to deinterlacing refactor
bool m_hardwareDeinterlacing; ///< OpenGL deinterlacing is enabled
MythDeintType m_deinterlacer;
VideoColourSpace *m_videoColourSpace;
bool m_viewportControl; ///< Video has control over view port
QOpenGLShaderProgram* m_shaders[ShaderCount] { nullptr };
Expand Down
16 changes: 7 additions & 9 deletions mythtv/libs/libmythtv/tv_play.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12052,15 +12052,13 @@ void TV::PlaybackMenuInit(const MenuBase &menu)
m_tvmSubsEnabled = ctx->m_player->GetCaptionsEnabled();
m_tvmSubsHaveText = ctx->m_player->HasTextSubtitles();
m_tvmSubsForcedOn = ctx->m_player->GetAllowForcedSubtitles();
ctx->m_player->GetVideoOutput()->GetDeinterlacers(m_tvmDeinterlacers);
QStringList decoderdeints
= ctx->m_player->GetMythCodecContext()->GetDeinterlacers();
m_tvmDeinterlacers.append(decoderdeints);
m_tvmCurrentDeinterlacer
= ctx->m_player->GetMythCodecContext()->getDeinterlacerName();
if (m_tvmCurrentDeinterlacer.isEmpty())
m_tvmCurrentDeinterlacer =
ctx->m_player->GetVideoOutput()->GetDeinterlacer();
//ctx->m_player->GetVideoOutput()->GetDeinterlacers(m_tvmDeinterlacers);
//QStringList decoderdeints = ctx->m_player->GetMythCodecContext()->GetDeinterlacers();
//m_tvmDeinterlacers.append(decoderdeints);
//m_tvmCurrentDeinterlacer = ctx->m_player->GetMythCodecContext()->getDeinterlacerName();
//if (m_tvmCurrentDeinterlacer.isEmpty())
// m_tvmCurrentDeinterlacer =
// ctx->m_player->GetVideoOutput()->GetDeinterlacer();
if (m_tvmVisual)
m_tvmVisualisers = ctx->m_player->GetVisualiserList();
VideoOutput *vo = ctx->m_player->GetVideoOutput();
Expand Down
68 changes: 42 additions & 26 deletions mythtv/libs/libmythtv/videobuffers.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,8 @@ void VideoBuffers::Init(uint NumDecode, bool ExtraForPause,
memset(At(i), 0, sizeof(VideoFrame));
At(i)->codec = FMT_NONE;
At(i)->interlaced_frame = -1;
At(i)->top_field_first = +1;
m_vbufferMap[At(i)] = i;
At(i)->top_field_first = 1;
m_vbufferMap[At(i)] = i;
}

m_needFreeFrames = NeedFree;
Expand All @@ -211,6 +211,35 @@ void VideoBuffers::Init(uint NumDecode, bool ExtraForPause,

for (uint i = 0; i < NumDecode; i++)
Enqueue(kVideoBuffer_avail, At(i));
SetDeinterlacing(DEINT_NONE, DEINT_NONE);
}

/// Apply the given single and double rate deinterlacer preferences to every
/// frame in the buffer pool (thread safe via the global buffer lock).
void VideoBuffers::SetDeinterlacing(MythDeintType Single, MythDeintType Double)
{
    QMutexLocker locker(&m_globalLock);
    for (auto & frame : m_buffers)
        SetDeinterlacingFlags(frame, Single, Double);
}

/*! \brief Set the appropriate flags for single and double rate deinterlacing
* \note Double rate support is determined by the VideoOutput class and must be set appropriately
* \note Driver deinterlacers are only available for hardware frames with the exception of
* NVDEC and VTB which can use shaders.
* \note Shader and CPU deinterlacers are disabled for hardware frames (except for shaders with NVDEC and VTB)
 * \note There is no support for CPU deinterlacing of NV12 frames (and 10bit equivalents) so we
* fallback to shaders
* \todo Handling of decoder deinterlacing with NVDEC
*/
void VideoBuffers::SetDeinterlacingFlags(VideoFrame &Frame, MythDeintType Single, MythDeintType Double)
{
    // Masks describing which deinterlacer classes each frame type may use.
    static const MythDeintType hardware = DEINT_ALL & ~(DEINT_CPU | DEINT_SHADER);
    static const MythDeintType software = DEINT_ALL & ~DEINT_DRIVER;
    static const MythDeintType shaders  = software & ~DEINT_CPU;

    Frame.deinterlace_single = Single;
    Frame.deinterlace_double = Double;

    // Hardware frames are normally restricted to driver deinterlacers, but
    // hardware frames readable as YUV may use the software paths instead.
    // For software frames, NV12-style formats fall back to shaders.
    if (format_is_hw(Frame.codec))
        Frame.deinterlace_allowed = format_is_hwyuv(Frame.codec) ? software : hardware;
    else
        Frame.deinterlace_allowed = format_is_nv12(Frame.codec) ? shaders : software;
}

/**
Expand All @@ -220,15 +249,6 @@ void VideoBuffers::Init(uint NumDecode, bool ExtraForPause,
void VideoBuffers::Reset()
{
QMutexLocker locker(&m_globalLock);

// Delete ffmpeg VideoFrames so we can create
// a different number of buffers below
frame_vector_t::iterator it = m_buffers.begin();
for (;it != m_buffers.end(); ++it)
{
av_freep(&it->qscale_table);
}

m_available.clear();
m_used.clear();
m_limbo.clear();
Expand All @@ -252,9 +272,9 @@ void VideoBuffers::SetPrebuffering(bool Normal)
void VideoBuffers::ReleaseDecoderResources(VideoFrame *Frame)
{
#if defined(USING_MEDIACODEC) || defined(USING_VTB) || defined(USING_VAAPI) || defined(USING_VDPAU) || defined(USING_NVDEC)
if ((Frame->pix_fmt == AV_PIX_FMT_VIDEOTOOLBOX) || (Frame->pix_fmt == AV_PIX_FMT_MEDIACODEC) ||
(Frame->pix_fmt == AV_PIX_FMT_VAAPI) || (Frame->pix_fmt == AV_PIX_FMT_VDPAU) ||
(Frame->pix_fmt == AV_PIX_FMT_CUDA))
if ((Frame->codec == FMT_VTB) || (Frame->codec == FMT_MEDIACODEC) ||
(Frame->codec == FMT_VAAPI) || (Frame->codec == FMT_VDPAU) ||
(Frame->codec == FMT_NVDEC))
{
AVBufferRef* ref = reinterpret_cast<AVBufferRef*>(Frame->priv[0]);
if (ref)
Expand Down Expand Up @@ -858,7 +878,6 @@ bool VideoBuffers::CreateBuffers(VideoFrameType Type, int Width, int Height,
Type, Buffers[i], YUVInfos[i].m_width, YUVInfos[i].m_height,
max(buf_size, YUVInfos[i].m_size),
(const int*) YUVInfos[i].m_pitches, (const int*) YUVInfos[i].m_offsets);

ok &= (Buffers[i] != nullptr);
}

Expand Down Expand Up @@ -907,11 +926,7 @@ void VideoBuffers::DeleteBuffers(void)
{
next_dbg_str = 0;
for (uint i = 0; i < Size(); i++)
{
m_buffers[i].buf = nullptr;
av_freep(&m_buffers[i].qscale_table);
}

for (size_t i = 0; i < m_allocatedArrays.size(); i++)
av_free(m_allocatedArrays[i]);
m_allocatedArrays.clear();
Expand All @@ -933,20 +948,17 @@ bool VideoBuffers::ReinitBuffer(VideoFrame *Frame, VideoFrameType Type)
{
if (At(i) == Frame)
{
av_freep(&Frame->qscale_table);
Frame->qscale_table = nullptr;

VideoFrameType old = Frame->codec;
int size = buffersize(Type, Frame->width, Frame->height);
int size = static_cast<int>(buffersize(Type, Frame->width, Frame->height));
unsigned char *buf = Frame->buf;
if (Frame->size != size)
{
// Free existing buffer
av_free(m_allocatedArrays[i]);
m_allocatedArrays[i] = Frame->buf = nullptr;
m_allocatedArrays[i] = Frame->buf = buf = nullptr;

// Initialise new
buf = (unsigned char*)av_malloc(size + 64);
buf = static_cast<unsigned char*>(av_malloc(static_cast<size_t>(size + 64)));
if (!buf)
{
LOG(VB_GENERAL, LOG_ERR, "Failed to reallocate frame buffer");
Expand All @@ -956,9 +968,13 @@ bool VideoBuffers::ReinitBuffer(VideoFrame *Frame, VideoFrameType Type)
}

m_allocatedArrays[i] = buf;
MythDeintType singler = Frame->deinterlace_single;
MythDeintType doubler = Frame->deinterlace_double;
init(Frame, Type, buf, Frame->width, Frame->height, size);
// retain deinterlacer settings and update restrictions based on new frame type
SetDeinterlacingFlags(*Frame, singler, doubler);
clear(Frame);
LOG(VB_PLAYBACK, LOG_DEBUG, QString("Reallocated frame %1->%2")
LOG(VB_PLAYBACK, LOG_INFO, QString("Reallocated frame %1->%2")
.arg(format_description(old)).arg(format_description(Type)));
EndLock();
return true;
Expand Down
2 changes: 2 additions & 0 deletions mythtv/libs/libmythtv/videobuffers.h
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ class MTV_PUBLIC VideoBuffers
bool CreateBuffers(VideoFrameType Type, int Width, int Height);
bool ReinitBuffer(VideoFrame *Frame, VideoFrameType Type);
void DeleteBuffers(void);
void SetDeinterlacing(MythDeintType Single, MythDeintType Double);

void Reset(void);
void DiscardFrames(bool NextFrameIsKeyFrame);
Expand Down Expand Up @@ -124,6 +125,7 @@ class MTV_PUBLIC VideoBuffers
const frame_queue_t *Queue(BufferType Type) const;
VideoFrame *GetNextFreeFrameInternal(BufferType EnqueueTo);
void ReleaseDecoderResources(VideoFrame *Frame);
void SetDeinterlacingFlags(VideoFrame &Frame, MythDeintType Single, MythDeintType Double);

frame_queue_t m_available;
frame_queue_t m_used;
Expand Down
15 changes: 1 addition & 14 deletions mythtv/libs/libmythtv/videoout_d3d.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,6 @@ bool VideoOutputD3D::InputChanged(const QSize &video_dim_buf,
if (Init(video_dim_buf, video_dim_disp,
aspect, (WId)m_hWnd, disp, av_codec_id))
{
BestDeint();
return true;
}

Expand Down Expand Up @@ -512,7 +511,7 @@ void VideoOutputD3D::ProcessFrame(VideoFrame *frame, OSD *osd,
if (frame)
dummy = frame->dummy;

bool safepauseframe = pauseframe && !IsBobDeint() && !gpu;
bool safepauseframe = pauseframe && !gpu;

if (!window.IsEmbedding())
ShowPIPs(frame, pipPlayers);
Expand Down Expand Up @@ -640,18 +639,6 @@ MythPainter *VideoOutputD3D::GetOSDPainter(void)
return m_osd_painter;
}

bool VideoOutputD3D::ApproveDeintFilter(const QString& filtername) const
{
if (codec_is_std(video_codec_id))
{
return !filtername.contains("bobdeint") &&
!filtername.contains("opengl") &&
!filtername.contains("vdpau");
}

return false;
}

MythCodecID VideoOutputD3D::GetBestSupportedCodec(
uint width, uint height, const QString &decoder,
uint stream_type, bool no_acceleration,
Expand Down
1 change: 0 additions & 1 deletion mythtv/libs/libmythtv/videoout_d3d.h
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@ class VideoOutputD3D : public VideoOutput
MythPainter *GetOSDPainter(void) override; // VideoOutput
bool hasHWAcceleration(void) const override // VideoOutput
{ return !codec_is_std(video_codec_id); }
bool ApproveDeintFilter(const QString& filtername) const override; // VideoOutput
void* GetDecoderContext(unsigned char* buf, uint8_t*& id) override; // VideoOutput

bool CanVisualise(AudioPlayer *audio, MythRender */*render*/) override // VideoOutput
Expand Down
12 changes: 9 additions & 3 deletions mythtv/libs/libmythtv/videoout_null.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,11 @@ void VideoOutputNull::StopEmbedding(void)
VideoOutput::StopEmbedding();
}

/// The null video output performs no rendering, so deinterlacing is always
/// forced off for every buffer regardless of the requested state.
void VideoOutputNull::SetDeinterlacing(bool /*Enable*/, bool /*DoubleRate*/)
{
    vbuffers.SetDeinterlacing(DEINT_NONE, DEINT_NONE);
}

void VideoOutputNull::PrepareFrame(VideoFrame *buffer, FrameScanType t,
OSD *osd)
{
Expand Down Expand Up @@ -242,8 +247,9 @@ void VideoOutputNull::UpdatePauseFrame(int64_t &disp_timecode)
disp_timecode = av_pause_frame.disp_timecode;
}

void VideoOutputNull::ProcessFrame(VideoFrame */*frame*/, OSD */*osd*/,
const PIPMap &/*pipPlayers*/,
FrameScanType /*scan*/)
void VideoOutputNull::ProcessFrame(VideoFrame *Frame, OSD*, const PIPMap &,
FrameScanType Scan)
{
if (Frame && !Frame->dummy)
m_deinterlacer.Filter(Frame, Scan);
}
3 changes: 1 addition & 2 deletions mythtv/libs/libmythtv/videoout_null.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@ class VideoOutputNull : public VideoOutput
const QSize &video_dim_disp,
float aspect,
WId winid, const QRect &win_rect, MythCodecID codec_id) override; // VideoOutput
bool SetupDeinterlace(bool, const QString &overridefilter = "") override // VideoOutput
{ (void)overridefilter; return false; } // we don't deinterlace in null output..
void SetDeinterlacing(bool Enable, bool DoubleRate) override;
void PrepareFrame(VideoFrame *buffer, FrameScanType, OSD *osd) override; // VideoOutput
void Show(FrameScanType ) override; // VideoOutput
void CreatePauseFrame(void);
Expand Down
71 changes: 2 additions & 69 deletions mythtv/libs/libmythtv/videoout_omx.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -245,14 +245,15 @@ void VideoOutputOMX::GetRenderOptions(render_opts &opts,
#endif
(*opts.osds)[kName].append("softblend");

(*opts.safe_renderers)["dummy"].append(kName);
/*(*opts.safe_renderers)["dummy"].append(kName);
(*opts.safe_renderers)["nuppel"].append(kName);
if (opts.decoders->contains("ffmpeg"))
(*opts.safe_renderers)["ffmpeg"].append(kName);
if (opts.decoders->contains(PrivateDecoderOMX::DecoderName))
(*opts.safe_renderers)[PrivateDecoderOMX::DecoderName].append(kName);
opts.priorities->insert(kName, 70);
*/
}

// static
Expand Down Expand Up @@ -537,74 +538,6 @@ bool VideoOutputOMX::InputChanged( // Return true if successful
return true;
}

// virtual
bool VideoOutputOMX::ApproveDeintFilter(const QString& filtername) const
{
if (filtername.contains(kName))
return true;

return VideoOutput::ApproveDeintFilter(filtername);
}

// virtual
bool VideoOutputOMX::SetDeinterlacingEnabled(bool interlaced)
{
return SetupDeinterlace(interlaced);
}

// virtual
bool VideoOutputOMX::SetupDeinterlace(bool interlaced, const QString &overridefilter)
{
if (!m_imagefx.IsValid())
return VideoOutput::SetupDeinterlace(interlaced, overridefilter);

QString deintfiltername;
if (db_vdisp_profile)
deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);

if (!deintfiltername.contains(kName))
{
if (m_deinterlacing && m_deintfiltername.contains(kName))
SetImageFilter(OMX_ImageFilterNone);
return VideoOutput::SetupDeinterlace(interlaced, overridefilter);
}

if (m_deinterlacing == interlaced && deintfiltername == m_deintfiltername)
return m_deinterlacing;

m_deintfiltername = deintfiltername;
m_deinterlacing = interlaced;

LOG(VB_PLAYBACK, LOG_INFO, LOC + __func__ + " switching " +
(interlaced ? "on" : "off") + " '" + deintfiltername + "'");

OMX_IMAGEFILTERTYPE type;
if (!m_deinterlacing || m_deintfiltername.isEmpty())
type = OMX_ImageFilterNone;
#ifdef USING_BROADCOM
else if (m_deintfiltername.contains("advanced"))
type = OMX_ImageFilterDeInterlaceAdvanced;
else if (m_deintfiltername.contains("fast"))
type = OMX_ImageFilterDeInterlaceFast;
else if (m_deintfiltername.contains("linedouble"))
type = OMX_ImageFilterDeInterlaceLineDouble;
#endif
else
{
LOG(VB_GENERAL, LOG_ERR, LOC + __func__ + " Unknown type: '" +
m_deintfiltername + "'");
#ifdef USING_BROADCOM
type = OMX_ImageFilterDeInterlaceFast;
#else
type = OMX_ImageFilterNone;
#endif
}

(void)SetImageFilter(type);

return m_deinterlacing;
}

OMX_ERRORTYPE VideoOutputOMX::SetImageFilter(OMX_IMAGEFILTERTYPE type)
{
LOG(VB_PLAYBACK, LOG_DEBUG, LOC + __func__ + " " + Filter2String(type));
Expand Down
3 changes: 0 additions & 3 deletions mythtv/libs/libmythtv/videoout_omx.h
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,6 @@ class VideoOutputOMX : public VideoOutput, private OMXComponentCtx
bool InputChanged(const QSize&, const QSize&, float, MythCodecID, bool&, MythMultiLocker*) override;
void EmbedInWidget(const QRect&) override;
void StopEmbedding(void) override;
bool ApproveDeintFilter(const QString&) const override;
bool SetDeinterlacingEnabled(bool interlaced) override;
bool SetupDeinterlace(bool interlaced, const QString& overridefilter="") override;
QString GetName(void) const override { return kName; } // = "openmax"
bool IsPIPSupported(void) const override { return true; }
bool IsPBPSupported(void) const override { return true; }
Expand Down
104 changes: 11 additions & 93 deletions mythtv/libs/libmythtv/videoout_opengl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -233,13 +233,7 @@ bool VideoOutputOpenGL::Init(const QSize &VideoDim, const QSize &VideoDispDim, f

// Reset OpenGLVideo
if (m_openGLVideo->IsValid())
{
m_openGLVideo->ResetFrameFormat();
bool temp_deinterlacing = m_deinterlacing;
SetDeinterlacingEnabled(true);
if (!temp_deinterlacing)
SetDeinterlacingEnabled(false);
}

// Finalise output
MoveResize();
Expand Down Expand Up @@ -358,7 +352,7 @@ bool VideoOutputOpenGL::CreateBuffers(MythCodecID CodecID, QSize Size)

void VideoOutputOpenGL::ProcessFrame(VideoFrame *Frame, OSD */*osd*/,
const PIPMap &PiPPlayers,
FrameScanType)
FrameScanType Scan)
{
if (!m_render)
return;
Expand All @@ -385,10 +379,7 @@ void VideoOutputOpenGL::ProcessFrame(VideoFrame *Frame, OSD */*osd*/,
m_newAspect = 0.0f;

if (wasembedding && ok)
{
EmbedInWidget(oldrect);
BestDeint();
}

if (!ok)
return;
Expand All @@ -397,8 +388,12 @@ void VideoOutputOpenGL::ProcessFrame(VideoFrame *Frame, OSD */*osd*/,
if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
m_render->logDebugMarker(LOC + "PROCESS_FRAME_START");

bool swframe = Frame ? !format_is_hw(Frame->codec) : false;
bool dummy = Frame ? Frame->dummy : false;
bool swframe = Frame ? !format_is_hw(Frame->codec) : false;
bool dummy = Frame ? Frame->dummy : false;

// software deinterlacing
if (!dummy && swframe)
m_deinterlacer.Filter(Frame, Scan);

if (!window.IsEmbedding())
{
Expand Down Expand Up @@ -431,7 +426,7 @@ void VideoOutputOpenGL::PrepareFrame(VideoFrame *Frame, FrameScanType Scan, OSD
if (Frame)
{
framesPlayed = Frame->frameNumber + 1;
topfieldfirst = Frame->top_field_first;
topfieldfirst = Frame->interlaced_reversed ? !Frame->top_field_first : Frame->top_field_first;
dummy = Frame->dummy;
}
else
Expand Down Expand Up @@ -589,7 +584,9 @@ VideoFrameType* VideoOutputOpenGL::DirectRenderFormats(void)
{
static VideoFrameType openglformats[] =
{ FMT_YV12, FMT_NV12, FMT_YUY2, FMT_YUV422P,
FMT_YUV420P10, FMT_YUV420P12, FMT_YUV420P16, FMT_NONE };
FMT_YUV420P10, FMT_YUV420P12, FMT_YUV420P16,
FMT_P010, FMT_P016,
FMT_NONE };
return &openglformats[0];
}

Expand Down Expand Up @@ -661,67 +658,6 @@ void VideoOutputOpenGL::InitPictureAttributes(void)
videoColourSpace.SetSupportedAttributes(ALL_PICTURE_ATTRIBUTES);
}

bool VideoOutputOpenGL::SetupDeinterlace(bool Interlaced, const QString &OverrideFilter)
{
if (!m_openGLVideo || !m_render)
return false;

OpenGLLocker ctx_lock(m_render);

if (db_vdisp_profile)
m_deintfiltername = db_vdisp_profile->GetFilteredDeint(OverrideFilter);

if (!m_deintfiltername.contains("opengl") && !m_deintfiltername.contains("vaapi"))
{
m_openGLVideo->SetDeinterlacing(false);
VideoOutput::SetupDeinterlace(Interlaced, OverrideFilter);
return m_deinterlacing;
}

if (m_videoProfile.contains("hw"))
{
m_deinterlacing = m_deintfiltername.contains("vaapi") ? Interlaced : false;
return m_deinterlacing;
}

m_deinterlacing = Interlaced;
if (m_deinterlacing && !m_deintfiltername.isEmpty())
{
if (!m_openGLVideo->AddDeinterlacer(m_deintfiltername))
{
LOG(VB_GENERAL, LOG_ERR, LOC + QString("Couldn't load deinterlace filter %1").arg(m_deintfiltername));
m_deinterlacing = false;
m_deintfiltername = "";
}
else
{
LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Using deinterlace method %1").arg(m_deintfiltername));
}
}

m_openGLVideo->SetDeinterlacing(m_deinterlacing);
return m_deinterlacing;
}

bool VideoOutputOpenGL::SetDeinterlacingEnabled(bool Enable)
{
if (!m_openGLVideo || !m_render)
return false;

OpenGLLocker ctx_lock(m_render);

if (Enable)
{
if (!m_deintfiltername.contains("opengl"))
m_openGLVideo->SetDeinterlacing(false);
return SetupDeinterlace(Enable);
}

m_openGLVideo->SetDeinterlacing(Enable);
m_deinterlacing = Enable;
return m_deinterlacing;
}

void VideoOutputOpenGL::ShowPIP(VideoFrame*, MythPlayer *PiPPlayer, PIPLocation Location)
{
if (!PiPPlayer)
Expand Down Expand Up @@ -822,24 +758,6 @@ void VideoOutputOpenGL::StopEmbedding(void)
MoveResize();
}

bool VideoOutputOpenGL::ApproveDeintFilter(const QString &Deinterlacer) const
{
bool hw = m_videoProfile.contains("hw");
// anything OpenGL when using shaders
if (!hw && Deinterlacer.contains("opengl"))
return true;

// vaapi if allowed
if (hw && Deinterlacer.contains("vaapi"))
return true;

// anything software based
if (!hw && !Deinterlacer.contains("vdpau") && !Deinterlacer.contains("vaapi"))
return true;

return VideoOutput::ApproveDeintFilter(Deinterlacer);
}

QStringList VideoOutputOpenGL::GetVisualiserList(void)
{
if (m_render)
Expand Down
3 changes: 0 additions & 3 deletions mythtv/libs/libmythtv/videoout_opengl.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,14 +34,11 @@ class VideoOutputOpenGL : public VideoOutput
void InitPictureAttributes(void) override;
void EmbedInWidget(const QRect &Rect) override;
void StopEmbedding(void) override;
bool SetDeinterlacingEnabled(bool Enable) override;
bool SetupDeinterlace(bool Interlaced, const QString &OverrideFilter = QString()) override;
void ShowPIP(VideoFrame *Frame, MythPlayer *PiPPlayer, PIPLocation Location) override;
void MoveResizeWindow(QRect NewRect) override;
void RemovePIP(MythPlayer *PiPPlayer) override;
bool IsPIPSupported(void) const override { return true; }
bool hasFullScreenOSD(void) const override { return true; }
bool ApproveDeintFilter(const QString& Deinterlacer) const override;
MythPainter *GetOSDPainter(void) override;
bool CanVisualise(AudioPlayer *Audio, MythRender *Render) override;
bool SetupVisualisation(AudioPlayer *Audio, MythRender *Render, const QString &Name) override;
Expand Down
182 changes: 27 additions & 155 deletions mythtv/libs/libmythtv/videooutbase.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -319,10 +319,6 @@ VideoOutput::VideoOutput() :
vsz_tmp_buf(nullptr),
vsz_scale_context(nullptr),

// Deinterlacing
m_deinterlacing(false),
m_deintfiltername("linearblend"),

// Various state variables
errorState(kError_None), framesPlayed(0),

Expand All @@ -340,7 +336,9 @@ VideoOutput::VideoOutput() :
m_visual(nullptr),

// 3D TV
m_stereo(kStereoscopicModeNone)
m_stereo(kStereoscopicModeNone),

m_deinterlacer()
{
memset(&pip_tmp_image, 0, sizeof(pip_tmp_image));
db_display_dim = QSize(gCoreContext->GetNumSetting("DisplaySizeWidth", 0),
Expand Down Expand Up @@ -434,146 +432,24 @@ void VideoOutput::SetVideoFrameRate(float playback_fps)
db_vdisp_profile->SetOutput(playback_fps);
}

/**
* \fn VideoOutput::SetDeinterlacingEnabled(bool)
* \brief Attempts to enable/disable deinterlacing using
* existing deinterlace method when enabling.
*/
bool VideoOutput::SetDeinterlacingEnabled(bool enable)
{
m_deinterlacing = enable;
return m_deinterlacing;
}

/**
* \brief Attempts to enable or disable deinterlacing.
* \return true if successful, false otherwise.
* \param interlaced Desired state of interlacing.
* \param overridefilter optional, explicitly use this nondefault
* deinterlacing filter
*/
bool VideoOutput::SetupDeinterlace(bool interlaced,
const QString& overridefilter)
void VideoOutput::SetDeinterlacing(bool Enable, bool DoubleRate)
{
PIPState pip_state = window.GetPIPState();

if (pip_state > kPIPOff && pip_state < kPBPLeft)
return false;

if (m_deinterlacing == interlaced)
{
if (!m_deinterlacing)
return false;
if (overridefilter.isEmpty() || overridefilter == m_deintfiltername)
return true;
}

m_deinterlacing = interlaced;

if (m_deinterlacing)
if (!Enable)
{
if (db_vdisp_profile)
m_deintfiltername = db_vdisp_profile->GetFilteredDeint(overridefilter);
else
m_deintfiltername = "";

if (MythCodecContext::isCodecDeinterlacer(m_deintfiltername))
{
m_deinterlacing = false;
return false;
}

if (!m_deintfiltername.isEmpty())
{
if (!ApproveDeintFilter(m_deintfiltername))
{
LOG(VB_GENERAL, LOG_ERR,
QString("Failed to approve '%1' deinterlacer "
"as a software deinterlacer")
.arg(m_deintfiltername));
m_deintfiltername.clear();
}
}

LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Using deinterlace method %1")
.arg(m_deintfiltername));
vbuffers.SetDeinterlacing(DEINT_NONE, DEINT_NONE);
return;
}

return m_deinterlacing;
}

/** \fn VideoOutput::FallbackDeint(void)
* \brief Fallback to non-frame-rate-doubling deinterlacing method.
*/
void VideoOutput::FallbackDeint(void)
{
SetupDeinterlace(false);
if (db_vdisp_profile)
SetupDeinterlace(true, db_vdisp_profile->GetFallbackDeinterlacer());
}

/** \fn VideoOutput::BestDeint(void)
* \brief Change to the best deinterlacing method.
*/
void VideoOutput::BestDeint(void)
{
SetupDeinterlace(false);
SetupDeinterlace(true);
}

/** \fn VideoOutput::IsExtraProcessingRequired(void) const
* \brief Should Prepare() and Show() and ProcessFrame be called
* twice for every Frameloop().
*
* All adaptive full framerate deinterlacers require an extra
* ProcessFrame() call.
*
* \return true if deint name contains doubleprocess
*/
bool VideoOutput::IsExtraProcessingRequired(void) const
{
return (m_deintfiltername.contains("doubleprocess")) && m_deinterlacing;
}
/**
* \fn VideoOutput::NeedsDoubleFramerate() const
* \brief Should Prepare() and Show() be called twice for every ProcessFrame().
*
* \return m_deintfiltername == "bobdeint" && m_deinterlacing
*/
bool VideoOutput::NeedsDoubleFramerate() const
{
// Bob deinterlace requires doubling framerate
return ((m_deintfiltername.contains("bobdeint") ||
m_deintfiltername.contains("doublerate") ||
m_deintfiltername.contains("doubleprocess")) &&
m_deinterlacing);
}

bool VideoOutput::IsBobDeint(void) const
{
return (m_deinterlacing && m_deintfiltername == "bobdeint");
}

/**
* \fn VideoOutput::ApproveDeintFilter(const QString& filtername) const
* \brief Approves all deinterlace filters, except ones which
* must be supported by a specific video output class.
*/
bool VideoOutput::ApproveDeintFilter(const QString& filtername) const
{
// Default to not supporting bob deinterlace
return (!filtername.contains("bobdeint") &&
!filtername.contains("doublerate") &&
!filtername.contains("opengl") &&
!filtername.contains("vdpau"));
}

void VideoOutput::GetDeinterlacers(QStringList &deinterlacers)
{
if (!db_vdisp_profile)
return;
QString rend = db_vdisp_profile->GetActualVideoRenderer();
deinterlacers = VideoDisplayProfile::GetDeinterlacers(rend);
MythDeintType singlerate = DEINT_HIGH | DEINT_CPU | DEINT_SHADER | DEINT_DRIVER;
MythDeintType doublerate = DoubleRate ? DEINT_HIGH | DEINT_CPU | DEINT_SHADER | DEINT_DRIVER : DEINT_NONE;
//if (db_vdisp_profile)
//{
// singlerate = db_vdisp_profile->GetFilteredDeint();
// doublerate = DoubleRate ? db_vdisp_profile->GetFilteredDeint(true) : DEINT_NONE;
//}
LOG(VB_GENERAL, LOG_INFO, LOC + QString("SetDeinterlacing: %1 DoubleRate %2")
.arg(DeinterlacerPref(singlerate)).arg(DeinterlacerPref(doublerate)));
vbuffers.SetDeinterlacing(singlerate, doublerate);
}

/**
Expand Down Expand Up @@ -609,10 +485,7 @@ bool VideoOutput::InputChanged(const QSize &video_dim_buf,
if (db_vdisp_profile)
db_vdisp_profile->SetInput(window.GetVideoDim(),0,codecName);
video_codec_id = myth_codec_id;
BestDeint();

DiscardFrames(true);

return true;
}
/**
Expand Down Expand Up @@ -1770,20 +1643,19 @@ int VideoOutput::CalcHueBase(const QString &adaptor_name)
{
int hue_adj = 50;

// XVideo adjustments
if ((adaptor_name == "ATI Radeon Video Overlay") ||
(adaptor_name == "XA G3D Textured Video") || /* ATI in VMWare*/
(adaptor_name == "Radeon Textured Video") || /* ATI */
(adaptor_name == "AMD Radeon AVIVO Video") || /* ATI */
(adaptor_name == "XV_SWOV" /* VIA 10K & 12K */) ||
(adaptor_name == "Savage Streams Engine" /* S3 Prosavage DDR-K */) ||
(adaptor_name == "SIS 300/315/330 series Video Overlay") ||
adaptor_name.toLower().contains("xvba") /* VAAPI */ ||
adaptor_name.toLower().startsWith("intel i965 driver"))
QString lower = adaptor_name.toLower();
// Hue base for different adaptors
// This can probably now be removed as it is only relevant to VAAPI
// which always uses 50
if (lower.contains("radeon") ||
lower.contains("g3d") ||
lower.contains("xvba") /* VAAPI */ ||
lower.startsWith("intel") ||
lower.contains("splitted"))
{
hue_adj = 50;
}
else if (adaptor_name.startsWith("NV17")) /* nVidia */
else if (lower.startsWith("nv17")) /* nVidia */
{
hue_adj = 0;
}
Expand Down
30 changes: 8 additions & 22 deletions mythtv/libs/libmythtv/videooutbase.h
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
#include "videocolourspace.h"
#include "visualisations/videovisual.h"
#include "mythavutil.h"
#include "mythdeinterlacer.h"

using namespace std;

Expand Down Expand Up @@ -62,18 +63,10 @@ class VideoOutput
WId winid, const QRect &win_rect, MythCodecID codec_id);
virtual void InitOSD(OSD *osd);
virtual void SetVideoFrameRate(float);
virtual bool SetDeinterlacingEnabled(bool);
virtual bool SetupDeinterlace(bool interlaced, const QString& overridefilter="");
virtual void FallbackDeint(void);
virtual void BestDeint(void);
virtual bool NeedsDoubleFramerate(void) const;
virtual bool IsBobDeint(void) const;
virtual bool IsExtraProcessingRequired(void) const;
virtual bool ApproveDeintFilter(const QString& filtername) const;
void GetDeinterlacers(QStringList &deinterlacers);
QString GetDeinterlacer(void) { return m_deintfiltername; }
virtual void PrepareFrame(VideoFrame *buffer, FrameScanType,
OSD *osd) = 0;
virtual void SetDeinterlacing(bool Enable, bool DoubleRate);
virtual void ProcessFrame(VideoFrame *Frame, OSD *Osd, const PIPMap &PipPlayers,
FrameScanType Scan = kScan_Ignore) = 0;
virtual void PrepareFrame(VideoFrame *buffer, FrameScanType, OSD *osd) = 0;
virtual void Show(FrameScanType) = 0;
VideoDisplayProfile *GetProfile() { return db_vdisp_profile; }

Expand Down Expand Up @@ -137,12 +130,6 @@ class VideoOutput
AdjustFillMode adjustFillMode = kAdjustFill_Toggle);

QString GetZoomString(void) const { return window.GetZoomString(); }

// pass in null to use the pause frame, if it exists.
virtual void ProcessFrame(VideoFrame *frame, OSD *osd,
const PIPMap &pipPlayers,
FrameScanType scan = kScan_Ignore) = 0;

PictureAttributeSupported GetSupportedPictureAttributes(void)
{ return videoColourSpace.SupportedAttributes(); }
int ChangePictureAttribute(PictureAttribute, bool direction);
Expand Down Expand Up @@ -334,10 +321,6 @@ class VideoOutput
unsigned char *vsz_tmp_buf;
struct SwsContext *vsz_scale_context;

// Deinterlacing
bool m_deinterlacing;
QString m_deintfiltername;

/// VideoBuffers instance used to track video output buffers.
VideoBuffers vbuffers;

Expand Down Expand Up @@ -365,6 +348,9 @@ class VideoOutput
StereoscopicMode m_stereo;

MythAVCopy m_copyFrame;

// Software deinterlacer
MythDeinterlacer m_deinterlacer;
};

#endif
22 changes: 7 additions & 15 deletions mythtv/programs/mythavtest/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -99,24 +99,11 @@ class VideoPerformanceTest
else if (decodeonly)
LOG(VB_GENERAL, LOG_INFO, "Decoding frames only - skipping display.");

bool doublerate = vo->NeedsDoubleFramerate();
if (deinterlace)
{
LOG(VB_GENERAL, LOG_INFO, QString("Deinterlacing: %1")
.arg(doublerate ? "doublerate" : "singlerate"));
if (doublerate)
LOG(VB_GENERAL, LOG_INFO, "Output will show fields per second");
}
else
{
LOG(VB_GENERAL, LOG_INFO, "Deinterlacing disabled");
}

DecoderBase* dec = mp->GetDecoder();
if (dec)
LOG(VB_GENERAL, LOG_INFO, QString("Using decoder: %1").arg(dec->GetCodecDecoderName()));

Jitterometer *jitter = new Jitterometer("Performance: ", mp->GetFrameRate() * (doublerate ? 2 : 1));
Jitterometer *jitter = new Jitterometer("Performance: ", static_cast<int>(mp->GetFrameRate()));

int ms = secondstorun * 1000;
QTime start = QTime::currentTime();
Expand Down Expand Up @@ -154,12 +141,17 @@ class VideoPerformanceTest

if (!decodeonly)
{
MythDeintType doubledeint = GetDoubleRateOption(frame, DEINT_CPU | DEINT_SHADER | DEINT_DRIVER);
vo->ProcessFrame(frame, nullptr, dummy, scan);
vo->PrepareFrame(frame, scan, nullptr);
vo->Show(scan);

if (vo->NeedsDoubleFramerate() && deinterlace)
if (doubledeint && deinterlace)
{
doubledeint = GetDoubleRateOption(frame, DEINT_CPU);
MythDeintType other = GetDoubleRateOption(frame, DEINT_SHADER | DEINT_DRIVER);
if (doubledeint && !other)
vo->ProcessFrame(frame, nullptr, dummy, kScan_Intr2ndField);
vo->PrepareFrame(frame, kScan_Intr2ndField, nullptr);
vo->Show(scan);
}
Expand Down