Skip to content

Commit

Permalink
Adding example for Pi 5
Browse files Browse the repository at this point in the history
  • Loading branch information
ross-newman committed May 20, 2024
1 parent 8704cf5 commit 74eee0e
Show file tree
Hide file tree
Showing 8 changed files with 155 additions and 31 deletions.
4 changes: 2 additions & 2 deletions example/rtp_sap_transmit/rtp_sap_transmit.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
// [Transmit example wrapper]
#include "rtp/rtp.h"
int main(int argc, char *argv[]) {
mediax::RtpSapTransmit<mediax::rtp::av1::gst::RtpAv1GstPayloader>
rtp("238.192.1.1", 5004, "test-session-name", 640, 480, 25, "AV1");
mediax::RtpSapTransmit<mediax::rtp::av1::gst::RtpAv1GstPayloader> rtp("238.192.1.1", 5004, "test-session-name", 640,
480, 25, "AV1");
std::vector<uint8_t> &data = rtp.GetBufferTestPattern();
while (true) rtp.Transmit(data.data(), false);
}
Expand Down
43 changes: 37 additions & 6 deletions example/rtp_sap_transmit/rtp_sap_transmit_arm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,44 @@
///

#include "rtp/rtp.h"

#define LIVE 0

int main(int argc, char *argv[]) {
mediax::RtpSapTransmit<mediax::rtp::h264::gst::omx::RtpH264GstOmxPayloader> rtp(
uint32_t count = 0;
// Use OMX to get hardware offload on the Raspberry Pi 5
// mediax::RtpSapTransmit<mediax::rtp::h264::gst::omx::RtpH264GstOmxPayloader> rtp(
// "238.192.1.1", 5004, "test-session-name", 640, 480, 25, "H264");

// Use the 'open' (software) payloader for software H.264 encoding on most platforms
mediax::RtpSapTransmit<mediax::rtp::h264::gst::open::RtpH264GstOpenPayloader> rtp(
"238.192.1.1", 5004, "test-session-name", 640, 480, 25, "H264");
// Create a buffer in unsupported format
std::vector<uint8_t> &data = rtp.GetBufferTestPattern(640, 480, ::mediax::rtp::ColourspaceType::kColourspaceYuv422);
// Convert the buffer to RGB
mediax::video::ColourSpaceCpu converter;

while (true) rtp.Transmit(data.data(), false);
// Convert functions optimised for CPU
mediax::video::ColourSpaceCpu convert;

// Converted RGBA buffer
std::vector<uint8_t> rgb_buffer(640 * 480 * 3);

while (true) {
#if LIVE
// Capture a frame from a live video source in YUV420P format (block till frame received)
#else

// Create a buffer in YUV420P format
std::vector<uint8_t> &data =
rtp.GetBufferTestPattern(640, 480, ::mediax::rtp::ColourspaceType::kColourspaceYuv420p, 10);
// 40ms delay
usleep(40000);
#endif
// Convert to the required format
convert.Yuv420pToRgb(480, 640, data.data(), rgb_buffer.data());

// Transmit the frame
rtp.Transmit(rgb_buffer.data(), false);

// Overwrite line
std::cout << "Frame " << count++ << "\r";
std::cout.flush();
}
}
2 changes: 1 addition & 1 deletion src/rtp/rtp_types.h
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ enum class ColourspaceType {
/// The bits per pixel
const std::map<ColourspaceType, uint8_t> kColourspaceBytes = {
{ColourspaceType::kColourspaceUndefined, 0}, {ColourspaceType::kColourspaceRgb24, 3},
{ColourspaceType::kColourspaceYuv422, 2}, {ColourspaceType::kColourspaceYuv420p, 2},
{ColourspaceType::kColourspaceYuv422, 2}, {ColourspaceType::kColourspaceYuv420p, 3},
{ColourspaceType::kColourspaceMono8, 1}, {ColourspaceType::kColourspaceMono16, 2},
{ColourspaceType::kColourspaceJpeg2000, 3}, {ColourspaceType::kColourspaceH264Part4, 3},
{ColourspaceType::kColourspaceH264Part10, 3}};
Expand Down
37 changes: 36 additions & 1 deletion src/rtp/rtp_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,11 @@ std::string ColourspaceTypeToString(rtp::ColourspaceType colourspace) {
return "H264Part10";
case rtp::ColourspaceType::kColourspaceH265:
return "H265";
case rtp::ColourspaceType::kColourspaceJpeg2000:
return "JPEG2000";
case rtp::ColourspaceType::kColourspaceAv1:
return "AV1";

default:
return "Unknown";
}
Expand All @@ -100,6 +105,8 @@ rtp::ColourspaceType ColourspaceTypeFromString(std::string_view str) {
return rtp::ColourspaceType::kColourspaceH264Part4;
} else if (str == "H264Part10") {
return rtp::ColourspaceType::kColourspaceH264Part10;
} else if (str == "H264") {
return rtp::ColourspaceType::kColourspaceH264Part10;
} else if (str == "H265") {
return rtp::ColourspaceType::kColourspaceH265;
} else {
Expand All @@ -114,8 +121,9 @@ uint8_t BitsPerPixel(rtp::ColourspaceType mode) {
case rtp::ColourspaceType::kColourspaceRgb24:
return 24;
case rtp::ColourspaceType::kColourspaceYuv422:
case rtp::ColourspaceType::kColourspaceYuv420p:
return 16;
case rtp::ColourspaceType::kColourspaceYuv420p:
return 12;  // 8 bits for the Y plane plus 4 bits average for the subsampled U/V planes
case rtp::ColourspaceType::kColourspaceMono16:
return 16;
case rtp::ColourspaceType::kColourspaceMono8:
Expand Down Expand Up @@ -217,6 +225,7 @@ void DumpHex(const void *data, size_t size) {
///
void PackRgb(uint8_t *data, uint32_t r, uint32_t g, uint32_t b, mediax::rtp::ColourspaceType colourspace) {
static bool odd = true;
static int count = 0;
switch (colourspace) {
default:
data[0] = (uint8_t)r;
Expand All @@ -236,6 +245,24 @@ void PackRgb(uint8_t *data, uint32_t r, uint32_t g, uint32_t b, mediax::rtp::Col
}
data[1] = (uint8_t)(y);
} break;
case mediax::rtp::ColourspaceType::kColourspaceYuv420p: {
// Calculate Ydata, U, and V planar values
uint8_t y = 0.299 * r + 0.587 * g + 0.114 * b;
uint8_t u = -0.14713 * r - 0.28886 * g + 0.436 * b + 128;
uint8_t v = 0.615 * r - 0.51498 * g - 0.10001 * b + 128;

// YUV420P is a planar format: the Y, U and V values are stored in three separate planes
data[0] = y;
// Chroma is written only on even lines; width is hard-coded to 640 here
if (count / 640 % 2 == 0) {
if (count % 2 == 0) {
data[640 * 480] = u;
} else {
data[640 * 480 + (640 * 480 / 4)] = v;
}
}
count++;
} break;
case mediax::rtp::ColourspaceType::kColourspaceRgba:
data[0] = (uint8_t)r;
data[1] = (uint8_t)g;
Expand Down Expand Up @@ -470,7 +497,15 @@ void CreateBouncingBallTestCard(uint8_t *data, uint32_t width, uint32_t height,
mediax::rtp::ColourspaceType colourspace) {
int ball_size = 50;
int half = ball_size / 2;

uint32_t stride = mediax::BytesPerPixel(colourspace);

if (colourspace == mediax::rtp::ColourspaceType::kColourspaceYuv420p) {
// YUV420P is a planar format, so the stride is 1 byte per pixel for the Y plane
stride = 1;
memset(data, 0, width * height * 1.5);
}

uint32_t size = width * height;

static Ball ball = {static_cast<float>(width) / 2, static_cast<float>(height) / 2, 5,
Expand Down
1 change: 1 addition & 0 deletions src/utils/colourspace.h
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,7 @@ class ColourSpace {
/// \param rgb The RGB image buffer
///
virtual int YuvToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgb) const = 0;
virtual int Yuv420pToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgb) const = 0;

///
/// \brief Scale the image to the target size for RGB
Expand Down
49 changes: 47 additions & 2 deletions src/utils/colourspace_cpu.cc
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ int ColourSpaceCpu::Convert(mediax::rtp::Resolution res, uint8_t *in, AVPixelFor
const std::array<uint8_t *, 1> inData = {in};
std::array<uint8_t *, 1> outData = {out};

// Bits use for AV_PIX_FMT_RGB24
// Use static_cast instead of C-style cast
const std::array<int32_t, 1> inLinesize = {(int32_t)(res.width * in_bytes)};
std::array<int32_t, 1> outLinesize = {(int32_t)(res.width * out_bytes)};
Expand All @@ -57,6 +56,52 @@ int ColourSpaceCpu::YuvToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint
return Convert({width, height}, yuv, AV_PIX_FMT_UYVY422, 2, rgb, AV_PIX_FMT_RGB24, 3);
}

// int ColourSpaceCpu::Yuv420pToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgb) const {
// return Convert({width, height}, yuv, AV_PIX_FMT_YUV420P, 3, rgb, AV_PIX_FMT_RGB24, 3);
// }

int ColourSpaceCpu::Yuv420pToRgb(uint32_t height, uint32_t width, uint8_t *yuv420p, uint8_t *rgb) const {
  // Convert planar YUV420P to packed RGB24 using BT.601 limited-range
  // fixed-point arithmetic. Returns 0 on success, -1 on null input.
  if ((yuv420p == nullptr) || (rgb == nullptr)) {
    return -1;  // Invalid input pointers
  }

  const uint32_t luma_size = width * height;
  const uint8_t *y_plane = yuv420p;
  const uint8_t *u_plane = yuv420p + luma_size;
  const uint8_t *v_plane = u_plane + (luma_size / 4);

  // Saturate an intermediate fixed-point result into the 0..255 byte range
  auto clamp_byte = [](int value) -> uint8_t {
    if (value < 0) return 0;
    if (value > 255) return 255;
    return static_cast<uint8_t>(value);
  };

  for (uint32_t row = 0; row < height; ++row) {
    // One chroma sample covers a 2x2 block of luma samples
    const uint32_t chroma_row_offset = (row / 2) * (width / 2);
    for (uint32_t col = 0; col < width; ++col) {
      const uint32_t luma_index = (row * width) + col;
      const uint32_t chroma_index = chroma_row_offset + (col / 2);

      // Centre the components for the fixed-point conversion matrix below
      const int c = y_plane[luma_index] - 16;
      const int d = u_plane[chroma_index] - 128;
      const int e = v_plane[chroma_index] - 128;

      uint8_t *pixel = &rgb[luma_index * 3];
      pixel[0] = clamp_byte((298 * c + 409 * e + 128) >> 8);
      pixel[1] = clamp_byte((298 * c - 100 * d - 208 * e + 128) >> 8);
      pixel[2] = clamp_byte((298 * c + 516 * d + 128) >> 8);
    }
  }

  return 0;  // Success
}

int ColourSpaceCpu::YuvToBgra(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *gbra) const {
return Convert({width, height}, yuv, AV_PIX_FMT_UYVY422, 2, gbra, AV_PIX_FMT_BGRA, 4);
}
Expand Down Expand Up @@ -164,7 +209,7 @@ int ColourSpaceCpu::Yuv422ToRgba(uint32_t height, uint32_t width, uint8_t *yuv,
}

int ColourSpaceCpu::Yuv420ToRgba(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgba) const {
return Convert({width, height}, yuv, AV_PIX_FMT_YUV420P, 2, rgba, AV_PIX_FMT_RGBA, 4);
return Convert({width, height}, yuv, AV_PIX_FMT_YUV420P, 3, rgba, AV_PIX_FMT_RGBA, 4);
}

int ColourSpaceCpu::Mono8ToRgba(uint32_t width, uint32_t height, uint8_t *mono8, uint8_t *rgba) const {
Expand Down
1 change: 1 addition & 0 deletions src/utils/colourspace_cpu.h
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,7 @@ class ColourSpaceCpu : public ColourSpace {
/// \param rgb The RGB image buffer
///
int YuvToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgb) const final;
int Yuv420pToRgb(uint32_t height, uint32_t width, uint8_t *yuv, uint8_t *rgb) const final;

///
/// \brief Scale the image to the target size.
Expand Down
49 changes: 30 additions & 19 deletions src/wrappers/rtp_sap_wrapper.h
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,16 @@ class RtpSapTransmit {
return data_buffer_;
}

///
/// \brief Get the frame buffer, resized to fit the given resolution and encoding
///
/// \param width The frame width in pixels
/// \param height The frame height in pixels
/// \param encoding The colourspace the buffer will hold
/// \return vector<uint8_t>& the resized internal data buffer
///
std::vector<uint8_t>& GetBuffer(uint32_t width, uint32_t height, ::mediax::rtp::ColourspaceType encoding) {
  // Multiply before dividing: BitsPerPixel() may not be a multiple of 8
  // (e.g. 12 for YUV420P), so (bits / 8) would truncate to 1 and
  // under-allocate the buffer by a third of the frame.
  data_buffer_.resize((width * height * BitsPerPixel(encoding)) / 8);
  return data_buffer_;
}

///
/// \brief Get the frame buffer containing a pre-defined test pattern
///
Expand All @@ -82,6 +92,7 @@ class RtpSapTransmit {
/// + 7=Black
/// + 8=White
/// + 9=White Noise
/// + 10=Bouncing Ball
///
/// \param pattern The test pattern generate from the list above, see rtp_utils.h
/// \return std::vector<uint8_t>&
Expand All @@ -99,55 +110,55 @@ class RtpSapTransmit {
///
std::vector<uint8_t>& GetBufferTestPattern(uint32_t height, uint32_t width, ::mediax::rtp::ColourspaceType encoding,
uint32_t pattern = 0) {
std::vector<uint8_t>& buffer = GetBuffer();
std::vector<uint8_t>& buffer = GetBuffer(width, height, encoding);

switch (pattern) {
case 0:
CreateColourBarEbuTestCard(buffer.data(), stream_info_.width, stream_info_.height, stream_info_.encoding);
CreateColourBarEbuTestCard(buffer.data(), width, height, encoding);
break;
case 1:
CreateColourBarTestCard(buffer.data(), stream_info_.width, stream_info_.height, stream_info_.encoding);
CreateColourBarTestCard(buffer.data(), width, height, encoding);
break;
case 2:
CreateGreyScaleBarTestCard(buffer.data(), stream_info_.width, stream_info_.height, stream_info_.encoding);
CreateGreyScaleBarTestCard(buffer.data(), width, height, encoding);
break;
case 3:
CreateCheckeredTestCard(buffer.data(), stream_info_.width, stream_info_.height, stream_info_.encoding);
CreateCheckeredTestCard(buffer.data(), width, height, encoding);
break;
case 4:
// red
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0xff, 0xff, 0xff,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0xff, 0xff, 0xff, encoding);
break;
case 5:
// green
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0x00, 0xff, 0x00,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0x00, 0xff, 0x00, encoding);
break;
case 6:
// blue
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0x00, 0x00, 0xff,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0x00, 0x00, 0xff, encoding);
break;
case 7:
// black
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0x00, 0x00, 0x00,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0x00, 0x00, 0x00, encoding);
break;
case 8:
// white
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0xff, 0xff, 0xff,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0xff, 0xff, 0xff, encoding);
break;
case 9:
CreateWhiteNoiseTestCard(buffer.data(), stream_info_.width, stream_info_.height, stream_info_.encoding);
// noise
CreateWhiteNoiseTestCard(buffer.data(), width, height, encoding);
break;
case 10:
// bouncing ball
CreateBouncingBallTestCard(buffer.data(), width, height, encoding);
break;
default:
// black
CreateSolidTestCard(buffer.data(), stream_info_.width, stream_info_.height, 0x00, 0x00, 0x00,
stream_info_.encoding);
CreateSolidTestCard(buffer.data(), width, height, 0x00, 0x00, 0x00, encoding);
break;
}
return GetBuffer();
return GetBuffer(width, height, encoding);
}

///
Expand Down

0 comments on commit 74eee0e

Please sign in to comment.