From 54c27f9e9944af0df88e88adf704e5b98eade64b Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Fri, 8 Sep 2023 17:51:14 +0530 Subject: [PATCH 01/19] NVR initial commit --- base/CMakeLists.txt | 41 +- base/include/AbsControlModule.h | 40 + base/include/Command.h | 203 +++- base/include/Module.h | 33 +- base/include/NVRControlModule.h | 60 ++ base/include/PipeLine.h | 2 + base/src/AbsControlModule.cpp | 96 ++ base/src/NVRControlModule.cpp | 221 +++++ base/test/abscontrolmodule_tests.cpp | 0 base/test/nvrcontrolmodule_tests.cpp | 1298 ++++++++++++++++++++++++++ 10 files changed, 1971 insertions(+), 23 deletions(-) create mode 100644 base/include/AbsControlModule.h create mode 100644 base/include/NVRControlModule.h create mode 100644 base/src/AbsControlModule.cpp create mode 100644 base/src/NVRControlModule.cpp create mode 100644 base/test/abscontrolmodule_tests.cpp create mode 100644 base/test/nvrcontrolmodule_tests.cpp diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt index f9d14b705..dad089d4c 100755 --- a/base/CMakeLists.txt +++ b/base/CMakeLists.txt @@ -4,7 +4,9 @@ OPTION(ENABLE_LINUX "Use this switch to enable LINUX" ON) OPTION(ENABLE_CUDA "Use this switch to enable CUDA" ON) OPTION(ENABLE_ARM64 "Use this switch to enable ARM64" OFF) OPTION(ENABLE_WINDOWS "Use this switch to enable WINDOWS" OFF) +OPTION(ENABLE_NVR "Use this switch to enable ApraPipesNVR" OFF) +SET(APRA_NVR ON) set(VCPKG_INSTALL_OPTIONS "--clean-after-build") IF(ENABLE_CUDA) add_compile_definitions(APRA_CUDA_ENABLED) @@ -222,6 +224,8 @@ SET(CORE_FILES_H include/OverlayModule.h include/OrderedCacheOfFiles.h include/TestSignalGeneratorSrc.h + include/NVRControlModule.h + include/AbsControlModule.h ) IF(ENABLE_WINDOWS) @@ -280,6 +284,9 @@ SET(IP_FILES src/OverlayFactory.h src/OverlayFactory.cpp src/TestSignalGeneratorSrc.cpp + src/NVRControlModule.cpp + #src/NVRPipeline.cpp + src/AbsControlModule.cpp ) @@ -561,9 +568,15 @@ SET(UT_FILES test/mp4_dts_strategy_tests.cpp test/overlaymodule_tests.cpp 
test/testSignalGeneratorSrc_tests.cpp + test/nvrcontrolmodule_tests.cpp + test/abscontrolmodule_tests.cpp ${ARM64_UT_FILES} ${CUDA_UT_FILES} ) +SET(NVR_UT_FILES + test/utmain.cpp + # test/nvrcontrolmodule_tests.cpp +) IF(ENABLE_LINUX) list(APPEND UT_FILES @@ -574,13 +587,17 @@ ENDIF(ENABLE_LINUX) add_executable(aprapipesut ${UT_FILES}) - +#IF(APRA_NVR) +add_executable(aprapipesnvr ${NVR_UT_FILES}) +#ENDIF(APRA_NVR) IF(ENABLE_ARM64) target_include_directories ( aprapipesut PRIVATE ${JETSON_MULTIMEDIA_LIB_INCLUDE} ${FFMPEG_ROOT} ${JPEG_INCLUDE_DIR}) + target_include_directories ( aprapipesnvr PRIVATE ${JETSON_MULTIMEDIA_LIB_INCLUDE} ${FFMPEG_ROOT} ${JPEG_INCLUDE_DIR}) ENDIF(ENABLE_ARM64) IF (ENABLE_CUDA) target_include_directories ( aprapipesut PRIVATE ${NVCODEC_INCLUDE_DIR}) + target_include_directories ( aprapipesnvr PRIVATE ${NVCODEC_INCLUDE_DIR}) ENDIF (ENABLE_CUDA) @@ -609,6 +626,28 @@ target_link_libraries(aprapipesut sfml-audio ) + target_link_libraries(aprapipesnvr + aprapipes + ${JPEG_LIBRARIES} + ${LIBMP4_LIB} + ${OPENH264_LIB} + ${Boost_LIBRARIES} + ${FFMPEG_LIBRARIES} + ${OpenCV_LIBRARIES} + ${JETSON_LIBS} + ${NVCUDAToolkit_LIBS} + ${NVCODEC_LIB} + ${NVJPEGLIB_L4T} + ${CURSES_LIBRARIES} + ZXing::Core + ZXing::ZXing + BZip2::BZip2 + ZLIB::ZLIB + liblzma::liblzma + bigint::bigint + sfml-audio + ) + IF(ENABLE_WINDOWS) file(COPY ${RUNTIME_DLLS} DESTINATION Debug/) file(COPY ${RUNTIME_DLLS} DESTINATION Release/) diff --git a/base/include/AbsControlModule.h b/base/include/AbsControlModule.h new file mode 100644 index 000000000..49b26a9dd --- /dev/null +++ b/base/include/AbsControlModule.h @@ -0,0 +1,40 @@ +#pragma once +#include +#include "Module.h" + +class AbsControlModuleProps : public ModuleProps +{ +public: + AbsControlModuleProps() + { + } +}; + +class AbsControlModule : public Module +{ +public: + AbsControlModule(AbsControlModuleProps _props); + ~AbsControlModule(); + bool init(); + bool term(); + void setProps(AbsControlModuleProps& props); + bool 
enrollModule(std::string role, boost::shared_ptr module); + boost::shared_ptr getModuleofRole(std::string role); + AbsControlModuleProps getProps(); + boost::container::deque> pipelineModules; + std::map> moduleRoles; + +protected: + bool process(frame_container& frames); + bool validateInputPins(); + bool validateOutputPins(); + bool validateInputOutputPins(); + void addInputPin(framemetadata_sp& metadata, string& pinId); + bool handleCommand(Command::CommandType type, frame_sp& frame); + bool handlePropsChange(frame_sp& frame); + +private: + void setMetadata(framemetadata_sp& metadata); + class Detail; + boost::shared_ptr mDetail; +}; \ No newline at end of file diff --git a/base/include/Command.h b/base/include/Command.h index 78908e949..ad5ea1439 100755 --- a/base/include/Command.h +++ b/base/include/Command.h @@ -16,7 +16,14 @@ class Command Seek, DeleteWindow, CreateWindow, - PlayPause + PlayPause, + NVRCommandRecord, + NVRCommandExport, + NVRCommandExportMMQ, + NVRCommandView, + NVRCommandExportView, + MP4WriterLastTS, + MMQtimestamps }; Command() @@ -37,7 +44,7 @@ class Command CommandType getType() { return type; - } + } private: friend class boost::serialization::access; @@ -147,7 +154,7 @@ class RelayCommand : public Command ar & nextModuleId & open; } - + }; class StepCommand : public Command @@ -157,7 +164,7 @@ class StepCommand : public Command { } - + size_t getSerializeSize() { return Command::getSerializeSize(); @@ -168,7 +175,7 @@ class StepCommand : public Command friend class boost::serialization::access; template void serialize(Archive & ar, const unsigned int /* file_version */) { - ar & boost::serialization::base_object(*this); + ar & boost::serialization::base_object(*this); } @@ -272,6 +279,7 @@ class MultimediaQueueXformCommand : public Command } }; + class Mp4SeekCommand : public Command { public: @@ -305,6 +313,191 @@ class Mp4SeekCommand : public Command } }; +//NVRCommands + +class NVRCommandRecord : public Command +{ +public: + 
NVRCommandRecord() : Command(Command::CommandType::NVRCommandRecord) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(doRecording); + } + + bool doRecording = false; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& doRecording; + } +}; + +class NVRCommandExport : public Command +{ +public: + NVRCommandExport() : Command(Command::CommandType::NVRCommandExport) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(startExportTS) + sizeof(stopExportTS); + } + + uint64_t startExportTS = 0; + uint64_t stopExportTS = 0; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& startExportTS; + ar& stopExportTS; + } +}; + +class NVRCommandExportMMQ : public Command +{ +public: + NVRCommandExportMMQ() : Command(Command::CommandType::NVRCommandExportMMQ) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(startExportMMQ); + } + + bool startExportMMQ = true; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& startExportMMQ; + } +}; + + +class NVRCommandView : public Command +{ +public: + NVRCommandView() : Command(Command::CommandType::NVRCommandView) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(doView); + } + + bool doView = false; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& doView; + } +}; + +class NVRCommandExportView : public Command +{ +public: + 
NVRCommandExportView() : Command(Command::CommandType::NVRCommandExportView) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(startViewTS) + sizeof(stopViewTS); + } + + uint64_t startViewTS = 0; + uint64_t stopViewTS = 0; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& startViewTS; + ar& stopViewTS; + } +}; + +class MP4WriterLastTS : public Command +{ +public: + MP4WriterLastTS() : Command(Command::CommandType::MP4WriterLastTS) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(lastWrittenTimeStamp) + sizeof(moduleId); + } + + uint64_t lastWrittenTimeStamp = 0; + std::string moduleId; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& lastWrittenTimeStamp; + ar& moduleId; + } +}; + +class MMQtimestamps : public Command +{ +public: + MMQtimestamps() : Command(Command::CommandType::MMQtimestamps) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(firstTimeStamp) + sizeof(lastTimeStamp) + sizeof(nvrExportStart) + sizeof(nvrExportStop) +sizeof(moduleId); + } + + uint64_t firstTimeStamp = 0; + uint64_t lastTimeStamp = 0; + uint64_t nvrExportStart = 0; + uint64_t nvrExportStop = 0; + std::string moduleId; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& firstTimeStamp; + ar& lastTimeStamp; + ar& nvrExportStart; + ar& nvrExportStop; + ar& moduleId; + } +}; + class PlayPauseCommand : public Command { public: diff --git a/base/include/Module.h b/base/include/Module.h index 31163050c..4c0fbcdeb 100644 --- a/base/include/Module.h +++ 
b/base/include/Module.h @@ -179,6 +179,21 @@ class Module { virtual void flushQue(); bool getPlayDirection() { return mDirection; } virtual void flushQueRecursive(); + template + bool queueCommand(T& cmd) + { + auto size = cmd.getSerializeSize(); + auto frame = makeCommandFrame(size, mCommandMetadata); + + Utils::serialize(cmd, frame->data(), size); + + // add to que + frame_container frames; + frames.insert(make_pair("command", frame)); + Module::push(frames); + + return true; + } protected: virtual boost_deque getFrames(frame_container& frames); virtual bool process(frame_container& frames) { return false; } @@ -220,22 +235,6 @@ class Module { return true; } - template - bool queueCommand(T& cmd) - { - auto size = cmd.getSerializeSize(); - auto frame = makeCommandFrame(size, mCommandMetadata); - - Utils::serialize(cmd, frame->data(), size); - - // add to que - frame_container frames; - frames.insert(make_pair("command", frame)); - Module::push(frames); - - return true; - } - template void getCommand(T& cmd, frame_sp& frame) { @@ -340,7 +339,7 @@ class Module { }; FFBufferMaker createFFBufferMaker(); - + boost::shared_ptr controlModule = nullptr; private: void setSieveDisabledFlag(bool sieve); frame_sp makeFrame(size_t size, framefactory_sp& framefactory); diff --git a/base/include/NVRControlModule.h b/base/include/NVRControlModule.h new file mode 100644 index 000000000..976468135 --- /dev/null +++ b/base/include/NVRControlModule.h @@ -0,0 +1,60 @@ +#pragma once +#include "Module.h" +#include "AbsControlModule.h" + +class NVRControlModuleProps : public AbsControlModuleProps +{ +public: + NVRControlModuleProps() + { + } + size_t getSerializeSize() + { + return ModuleProps::getSerializeSize(); + } +private: + friend class boost::serialization::access; + + template + void serialize(Archive& ar, const unsigned int version) + { + ar& boost::serialization::base_object(*this); + } +}; + +class NVRControlModule : public AbsControlModule +{ + public: + 
NVRControlModule(NVRControlModuleProps _props); + ~NVRControlModule(); + bool init(); + bool term(); + void setProps(NVRControlModuleProps& props); + NVRControlModuleProps getProps(); + bool validateModuleRoles(); + bool nvrRecord(bool record); + bool nvrExport(uint64_t startTime, uint64_t stopTime); + bool nvrExportView(uint64_t startTime, uint64_t stopTime); + bool nvrView(bool view); + bool isRendererPaused = false; + uint64_t pausedTS = 0; + uint64_t mp4lastWrittenTS = 0; + uint64_t firstMMQtimestamp = 0; + uint64_t lastMMQtimestamp = 0; + uint64_t givenStart = 0; + uint64_t givenStop = 0; + uint64_t mp4_2_lastWrittenTS = 0; + bool isExporting = false; + +protected: + bool validateInputPins(); + bool validateOutputPins(); + bool validateInputOutputPins(); + bool handleCommand(Command::CommandType type, frame_sp& frame); + bool handlePropsChange(frame_sp& frame); + +private: + void setMetadata(framemetadata_sp& metadata); + class Detail; + boost::shared_ptr mDetail; +}; \ No newline at end of file diff --git a/base/include/PipeLine.h b/base/include/PipeLine.h index b99d3fb86..7fee27728 100755 --- a/base/include/PipeLine.h +++ b/base/include/PipeLine.h @@ -1,6 +1,7 @@ #pragma once #include #include +#include "NVRControlModule.h" #include "enum_macros.h" #include @@ -36,6 +37,7 @@ class PipeLine { ~PipeLine(); std::string getName() { return mName; } bool appendModule(boost::shared_ptr pModule); + bool addControlModule(boost::shared_ptrcModule); bool init(); void run_all_threaded(); void run_all_threaded_withpause(); diff --git a/base/src/AbsControlModule.cpp b/base/src/AbsControlModule.cpp new file mode 100644 index 000000000..cc8e270e7 --- /dev/null +++ b/base/src/AbsControlModule.cpp @@ -0,0 +1,96 @@ +#include "stdafx.h" +#include +#include "AbsControlModule.h" +#include "Module.h" +#include "Command.h" + +class AbsControlModule::Detail +{ +public: + Detail(AbsControlModuleProps& _props) : mProps(_props) + { + } + + ~Detail() + { + } + AbsControlModuleProps 
mProps; +}; + +AbsControlModule::AbsControlModule(AbsControlModuleProps _props) + :Module(TRANSFORM, "NVRControlModule", _props) +{ + mDetail.reset(new Detail(_props)); +} +AbsControlModule::~AbsControlModule() {} + +bool AbsControlModule::validateInputPins() +{ + return true; +} + +bool AbsControlModule::validateOutputPins() +{ + return true; +} + +bool AbsControlModule::validateInputOutputPins() +{ + return true; +} + +void AbsControlModule::addInputPin(framemetadata_sp& metadata, string& pinId) +{ + Module::addInputPin(metadata, pinId); +} + +bool AbsControlModule::handleCommand(Command::CommandType type, frame_sp& frame) +{ + return true; +} + +bool AbsControlModule::handlePropsChange(frame_sp& frame) +{ + return true; +} + +bool AbsControlModule::init() +{ + if (!Module::init()) + { + return false; + } + return true; +} + +bool AbsControlModule::term() +{ + return Module::term(); +} + +AbsControlModuleProps AbsControlModule::getProps() +{ + fillProps(mDetail->mProps); + return mDetail->mProps; +} + +void AbsControlModule::setProps(AbsControlModuleProps& props) +{ + Module::addPropsToQueue(props); +} + +bool AbsControlModule::process(frame_container& frames) +{ + return true; +} + +bool AbsControlModule::enrollModule(std::string role, boost::shared_ptr module) +{ + moduleRoles[role] = module; + return true; +} + +boost::shared_ptr AbsControlModule::getModuleofRole(std::string role) +{ + return moduleRoles[role]; +} \ No newline at end of file diff --git a/base/src/NVRControlModule.cpp b/base/src/NVRControlModule.cpp new file mode 100644 index 000000000..d2c09197e --- /dev/null +++ b/base/src/NVRControlModule.cpp @@ -0,0 +1,221 @@ +#include +#include +#include "NVRControlModule.h" +#include "Mp4WriterSink.h" +#include "Module.h" +#include "Command.h" + +class NVRControlModule::Detail +{ +public: + Detail(NVRControlModuleProps& _props) : mProps(_props) + { + } + + ~Detail() + { + } + void setProps(NVRControlModuleProps _props) + { + mProps = _props; + } + 
NVRControlModuleProps mProps; +}; + + +NVRControlModule::NVRControlModule(NVRControlModuleProps _props) + :AbsControlModule(_props) +{ + mDetail.reset(new Detail(_props)); +} + +NVRControlModule::~NVRControlModule() {} + +bool NVRControlModule::validateInputPins() +{ + return true; +} + +bool NVRControlModule::validateOutputPins() +{ + return true; +} + +bool NVRControlModule::validateInputOutputPins() +{ + return true; +} + +bool NVRControlModule::handleCommand(Command::CommandType type, frame_sp& frame) +{ + + if (type == Command::CommandType::NVRCommandView) + { + NVRCommandView cmd; + getCommand(cmd, frame); + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("Renderer_1")) // Logic for detecting modules to add + { + auto myId = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(cmd); + } + } + // if(cmd.doView == false) + // { + // LOG_ERROR<<" PAUSED COMMAND SENT TO MMQ - paused start is !!"<getId(); + // pipelineModules[i]->queueCommand(cmd); + // } + // } + + // } + return true; + } + if (type == Command::CommandType::NVRCommandExportView) + { + NVRCommandExportView cmd; + getCommand(cmd, frame); + givenStart = cmd.startViewTS; + givenStop = cmd.stopViewTS; + if(pausedTS < firstMMQtimestamp) + { + LOG_ERROR<<" The seeked start time is in disk!!"; + Mp4SeekCommand command; + command.seekStartTS = pausedTS; + command.forceReopen = false; + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("Reader_1")) // Sending command to reader + { + auto myId = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(command); + pipelineModules[i]->play(true); + return true; + } + } + } + else + { + LOG_ERROR<<" The seeked start time is in MULTIMEDIA-QUEUE!!"; + MultimediaQueueXformCommand cmd; + cmd.startTime = pausedTS; + cmd.endTime = 1694340826000; + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == 
getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue + { + auto myid = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(cmd); + } + } + } + + return true; + + } + + if (type == Command::CommandType::MMQtimestamps) + { + MMQtimestamps cmd; + getCommand(cmd, frame); + firstMMQtimestamp = cmd.firstTimeStamp; + lastMMQtimestamp = cmd.lastTimeStamp; + return true; + } + + return Module::handleCommand(type, frame); +} + +bool NVRControlModule::handlePropsChange(frame_sp& frame) +{ + NVRControlModuleProps props(mDetail->mProps); + auto ret = Module::handlePropsChange(frame, props); + mDetail->setProps(props); + return ret; +} + +bool NVRControlModule::init() +{ + if (!Module::init()) + { + return false; + } + return true; +} + +bool NVRControlModule::term() +{ + return Module::term(); +} + +NVRControlModuleProps NVRControlModule::getProps() +{ + fillProps(mDetail->mProps); + return mDetail->mProps; +} + +void NVRControlModule::setProps(NVRControlModuleProps& props) +{ + Module::addPropsToQueue(props); +} + +bool NVRControlModule::validateModuleRoles() +{ + for (int i = 0; i < pipelineModules.size(); i++) + { + bool modPresent = false; + for (auto it = moduleRoles.begin(); it != moduleRoles.end(); it++) + { + if (pipelineModules[i] == it->second) + { + modPresent = true; + } + } + if (!modPresent) + { + LOG_ERROR << "Modules and roles validation failed!!"; + } + } + return true; +} + +bool NVRControlModule::nvrRecord(bool record) +{ + NVRCommandRecord cmd; + cmd.doRecording = record; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrExport(uint64_t ts, uint64_t te) +{ + NVRCommandExport cmd; + cmd.startExportTS = ts; + cmd.stopExportTS = te; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrExportView(uint64_t ts, uint64_t te) +{ + NVRCommandExportView cmd; + cmd.startViewTS = ts; + cmd.stopViewTS = te; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrView(bool view) +{ + NVRCommandView cmd; + cmd.doView = 
view; + return queueCommand(cmd); +} \ No newline at end of file diff --git a/base/test/abscontrolmodule_tests.cpp b/base/test/abscontrolmodule_tests.cpp new file mode 100644 index 000000000..e69de29bb diff --git a/base/test/nvrcontrolmodule_tests.cpp b/base/test/nvrcontrolmodule_tests.cpp new file mode 100644 index 000000000..7c243c803 --- /dev/null +++ b/base/test/nvrcontrolmodule_tests.cpp @@ -0,0 +1,1298 @@ +#include +#include +#include +#include +#include +#include +//#include "NVRPipeline.h" +#include "PipeLine.h" +#include "ExternalSinkModule.h" +#include "EncodedImageMetadata.h" +#include "FrameContainerQueue.h" +#include "Module.h" +#include "Utils.h" +#include "NVRControlModule.h" +#include "WebCamSource.h" +#include "H264EncoderNVCodec.h" +#include "Mp4WriterSink.h" +#include "CudaMemCopy.h" +#include "CudaStreamSynchronize.h" +#include "ColorConversionXForm.h" +#include "FileReaderModule.h" +#include "ImageViewerModule.h" +#include "KeyboardListener.h" +#include "MultimediaQueueXform.h" +#include "H264Metadata.h" +#include "ValveModule.h" +#include "Mp4ReaderSource.h" +#include "Mp4VideoMetadata.h" +#include "FileWriterModule.h" +#include "NvV4L2Camera.h" //Jetson +#include "NvTransform.h" +#include "EglRenderer.h" + +BOOST_AUTO_TEST_SUITE(nvrcontrolmodule_tests) + + +// struct CheckThread { +// class SourceModuleProps : public ModuleProps +// { +// public: +// SourceModuleProps() : ModuleProps() +// {}; +// }; +// class TransformModuleProps : public ModuleProps +// { +// public: +// TransformModuleProps() : ModuleProps() +// {}; +// }; +// class SinkModuleProps : public ModuleProps +// { +// public: +// SinkModuleProps() : ModuleProps() +// {}; +// }; + +// class SourceModule : public Module +// { +// public: +// SourceModule(SourceModuleProps props) : Module(SOURCE, "sourceModule", props) +// { +// }; + +// protected: +// bool process() { return false; } +// bool validateOutputPins() +// { +// return true; +// } +// bool validateInputPins() +// { +// 
return true; +// } +// }; +// class TransformModule : public Module +// { +// public: +// TransformModule(TransformModuleProps props) :Module(TRANSFORM, "transformModule", props) {}; +// protected: +// bool process() { return false; } +// bool validateOutputPins() +// { +// return true; +// } +// bool validateInputPins() +// { +// return true; +// } +// }; +// class SinkModule : public Module +// { +// public: +// SinkModule(SinkModuleProps props) :Module(SINK, "mp4WritersinkModule", props) {}; +// protected: +// bool process() { return false; } +// bool validateOutputPins() +// { +// return true; +// } +// bool validateInputPins() +// { +// return true; +// } +// }; +// }; + +// void key_func(boost::shared_ptr& mControl) +// { + +// while (true) { +// int k; +// k = getchar(); +// if (k == 97) +// { +// BOOST_LOG_TRIVIAL(info) << "Starting Render!!"; +// mControl->nvrView(true); +// mControl->step(); +// } +// if (k == 100) +// { +// BOOST_LOG_TRIVIAL(info) << "Stopping Render!!"; +// mControl->nvrView(false); +// mControl->step(); +// } +// if (k == 101) +// { +// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); +// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); +// uint64_t seekStartTS = now - 180000; +// uint64_t seekEndTS = now + 120000; +// mControl->nvrExport(seekStartTS, seekEndTS); +// mControl->step(); +// } +// if (k == 114) +// { +// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; +// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); +// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); +// uint64_t seekStartTS = now - 5000; +// uint64_t seekEndTS = now; +// mControl->nvrExport(seekStartTS, seekEndTS); +// mControl->step(); +// } +// else +// { +// BOOST_LOG_TRIVIAL(info) << "The value pressed is .."<< k; +// } +// } +// } + +// void key_Read_func(boost::shared_ptr& mControl, 
boost::shared_ptr& mp4Reader) +// { + +// while (true) { +// int k; +// k = getchar(); +// if (k == 97) +// { +// BOOST_LOG_TRIVIAL(info) << "Starting Render!!"; +// mControl->nvrView(true); +// mControl->step(); +// } +// if (k == 100) +// { +// BOOST_LOG_TRIVIAL(info) << "Stopping Render!!"; +// mControl->nvrView(false); +// mControl->step(); +// } +// if (k == 101) +// { +// /*uint64_t x, y; +// cout << "Enter start time of Export : "; +// cin >> x; +// cout << "Enter end time of Export : "; +// cin >> y; +// cout << "Start time is " << x << " End time is " << y;*/ +// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; +// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); +// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); +// uint64_t seekStartTS = now - 180000; +// uint64_t seekEndTS = now + 120000; +// mControl->nvrExport(seekStartTS, seekEndTS); +// mControl->step(); +// } +// if (k == 114) +// { +// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; +// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); +// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); +// uint64_t seekStartTS = now + 30000; +// uint64_t seekEndTS = now + 60000; +// mControl->nvrExport(seekStartTS, seekEndTS); +// mControl->step(); +// mp4Reader->play(true); +// } +// if (k == 112) +// { +// BOOST_LOG_TRIVIAL(info) << "Stopping Pipeline Input"; +// } + +// else +// { +// BOOST_LOG_TRIVIAL(info) << "The value pressed is .." 
<< k; +// } +// } +// } + +// BOOST_AUTO_TEST_CASE(basic) +// { +// CheckThread f; + +// auto m1 = boost::shared_ptr(new CheckThread::SourceModule(CheckThread::SourceModuleProps())); +// auto metadata1 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); +// m1->addOutputPin(metadata1); +// auto m2 = boost::shared_ptr(new CheckThread::TransformModule(CheckThread::TransformModuleProps())); +// m1->setNext(m2); +// auto metadata2 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); +// m2->addOutputPin(metadata2); +// auto m3 = boost::shared_ptr(new CheckThread::TransformModule(CheckThread::TransformModuleProps())); +// m2->setNext(m3); +// auto metadata3 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); +// m3->addOutputPin(metadata3); +// auto m4 = boost::shared_ptr(new CheckThread::SinkModule(CheckThread::SinkModuleProps())); +// m3->setNext(m4); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// // add all source modules +// p.appendModule(m1); +// // add control module if any +// p.addControlModule(mControl); +// mControl->enrollModule("source", m1); +// mControl->enrollModule("transform_1", m2); +// mControl->enrollModule("writer", m3); +// mControl->enrollModule("sink", m4); +// // init +// p.init(); +// // control init - do inside pipeline init +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(10)); +// mControl->nvrView(false); +// // dont need step in run_all_threaded +// mControl->step(); + +// boost::this_thread::sleep_for(boost::chrono::seconds(10)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// } + +// BOOST_AUTO_TEST_CASE(checkNVR) +// { +// auto nvrPipe = boost::shared_ptr(new NVRPipeline()); +// nvrPipe->open(); +// //nvrPipe->startRecording(); +// nvrPipe->close(); +// } + +// BOOST_AUTO_TEST_CASE(NVRTest) +// { +// auto cuContext = apracucontext_sp(new ApraCUcontext()); 
+// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; +// auto height = 360; + +// // test with 0 - with multiple cameras + +// WebCamSourceProps webCamSourceprops(0, 1920, 1080); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); +// std::string outFolderPath = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps = Mp4WriterSinkProps(1, 1, 24, outFolderPath); +// mp4WriterSinkProps.logHealth = true; +// mp4WriterSinkProps.logHealthFrequency = 10; +// auto mp4Writer = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps)); +// encoder->setNext(mp4Writer); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// p.appendModule(webCam); +// // add control module if any +// p.addControlModule(mControl); +// mControl->enrollModule("source", webCam); +// mControl->enrollModule("colorConversion", colorConvt); +// mControl->enrollModule("cudaCopy", copy); +// mControl->enrollModule("encoder", encoder); +// mControl->enrollModule("Writer-1", mp4Writer); +// // init +// p.init(); +// // control init - do inside pipeline init +// mControl->init(); +// p.run_all_threaded(); +// //mControl->nvrRecord(true); +// // 
dont need step in run_all_threaded +// mControl->step(); + +// boost::this_thread::sleep_for(boost::chrono::seconds(240)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// } + +// BOOST_AUTO_TEST_CASE(NVRView) +// { +// WebCamSourceProps webCamSourceprops(0, 1920, 1080); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto multique = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// webCam->setNext(multique); +// multique->setNext(view); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// p.appendModule(webCam); +// boost::thread inp(key_func, mControl); +// p.addControlModule(mControl); +// mControl->enrollModule("filereader", webCam); +// mControl->enrollModule("viewer", view); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(30)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(NVRViewKey) +// { +// WebCamSourceProps webCamSourceprops(0, 1920, 1080); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// webCam->setNext(view); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// std::thread inp(key_func, std::ref(mControl)); +// p.appendModule(webCam); +// p.addControlModule(mControl); +// mControl->enrollModule("filereader", webCam); +// mControl->enrollModule("viewer", view); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(100)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// inp.join(); +// } + + +// BOOST_AUTO_TEST_CASE(NVRFile) +// { +// 
std::string inFolderPath = "./data/Raw_YUV420_640x360"; +// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); +// fileReaderProps.fps = 20; +// fileReaderProps.readLoop = true; +// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // +// auto metadata = framemetadata_sp(new RawImageMetadata(640, 360, ImageMetadata::ImageType::MONO, CV_8UC1, 0, CV_8U, FrameMetadata::HOST, true)); +// auto pinId = fileReader->addOutputPin(metadata); +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// fileReader->setNext(view); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// p.appendModule(fileReader); +// p.addControlModule(mControl); +// mControl->enrollModule("filereader", fileReader); +// mControl->enrollModule("viewer", view); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); + +// boost::this_thread::sleep_for(boost::chrono::seconds(15)); +// mControl->nvrView(false); +// mControl->step(); +// boost::this_thread::sleep_for(boost::chrono::seconds(10)); +// mControl->nvrView(true); +// mControl->step(); +// boost::this_thread::sleep_for(boost::chrono::seconds(15)); + +// p.stop(); +// p.term(); +// p.wait_for_all(); +// } + +// BOOST_AUTO_TEST_CASE(NVRkey) +// { +// std::string inFolderPath = "./data/h264_data"; +// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); +// fileReaderProps.fps = 20; +// fileReaderProps.readLoop = true; +// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // +// auto encodedImageMetadata = framemetadata_sp(new H264Metadata(704, 576)); +// auto pinId = fileReader->addOutputPin(encodedImageMetadata); +// //auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); 
+// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// fileReader->setNext(mp4Writer_1); +// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = true; +// mp4WriterSinkProps_2.logHealthFrequency = 10; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// //fileReader->setNext(mp4Writer_2); +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// //std::thread inp(key_func, mControl); +// p.appendModule(fileReader); +// p.addControlModule(mControl); +// mControl->enrollModule("filereader", fileReader); +// mControl->enrollModule("writer", mp4Writer_1); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(10)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// //inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(NVR_mmq) +// { +// std::string inFolderPath = "./data/h264_data"; +// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); +// fileReaderProps.fps = 20; +// fileReaderProps.readLoop = true; +// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // +// auto encodedImageMetadata = framemetadata_sp(new H264Metadata(704, 576)); +// auto pinId = fileReader->addOutputPin(encodedImageMetadata); + +// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(30000, 5000, true))); +// fileReader->setNext(multiQueue); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = 
boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// multiQueue->setNext(mp4Writer_1); + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// std::thread inp(key_func, std::ref(mControl)); +// p.appendModule(fileReader); +// p.addControlModule(mControl); +// mControl->enrollModule("filereader", fileReader); +// mControl->enrollModule("multimediaQueue", multiQueue); +// mControl->enrollModule("writer", mp4Writer_1); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(50)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(NVR_mmq_view) +// { +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 1920; +// auto height = 1020; + + +// WebCamSourceProps webCamSourceprops(0, 1920, 1080); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); + +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); + +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// auto encoder = boost::shared_ptr(new 
H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); + + +// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(30000, 5000, true))); +// encoder->setNext(multiQueue); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// multiQueue->setNext(mp4Writer_1); + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// std::thread inp(key_func, std::ref(mControl)); +// p.appendModule(webCam); +// p.addControlModule(mControl); +// mControl->enrollModule("webcamera", webCam); +// mControl->enrollModule("multimediaQueue", multiQueue); +// mControl->enrollModule("writer", mp4Writer_1); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(60)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(checkNVR2) //Use this for testing pipeline note - Only one mp4Writer is present in this pipeline +// { +// LoggerProps loggerProps; +// loggerProps.logLevel = boost::log::trivial::severity_level::info; +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// Logger::initLogger(loggerProps); + +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; +// auto height = 360; + + +// WebCamSourceProps webCamSourceprops(0, 1920, 1080); +// auto webCam = boost::shared_ptr(new 
WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); +// H264EncoderNVCodecProps encProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames); +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(encProps)); +// copy->setNext(encoder); + +// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); +// encoder->setNext(multiQueue); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// multiQueue->setNext(mp4Writer_1); + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); + +// PipeLine p("test"); +// std::thread inp(key_func, std::ref(mControl)); +// p.appendModule(webCam); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("MultimediaQueue", multiQueue); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// 
boost::this_thread::sleep_for(boost::chrono::seconds(360)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(checkNVR3) //Use this for testing pipeline note - Mimics the actual pipeline +// { +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; //1920 +// auto height = 360; //1020 + + +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); + +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); + +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new 
Mp4WriterSink(mp4WriterSinkProps_1)); +// encoder->setNext(mp4Writer_1); + +// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); +// encoder->setNext(multiQue); +// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = true; +// mp4WriterSinkProps_2.logHealthFrequency = 10; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// multiQue->setNext(mp4Writer_2); + +// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); +// //multiQue->setNext(fileWriter); + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); +// PipeLine p("test"); +// std::thread inp(key_func, std::ref(mControl)); +// p.appendModule(webCam); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("MultimediaQueue", multiQue); +// mControl->enrollModule("Writer-2", mp4Writer_2); + +// p.init(); +// mControl->init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(360)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(mp4Read) +// { +// LoggerProps loggerProps; +// loggerProps.logLevel = boost::log::trivial::severity_level::info; +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// Logger::initLogger(loggerProps); + +// std::string skipDir = "./data/testOutput/mp4_videos/24bpp"; +// std::string startingVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0013/1669188939867.mp4"; +// std::string outPath = "data/testOutput/outFrames"; +// 
uint64_t seekStartTS = 1669119595641; +// uint64_t seekEndTS = 1669119595641 + 10000; +// boost::filesystem::path file("frame_??????.h264"); +// auto frameType = FrameMetadata::FrameType::H264_DATA; +// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); + +// boost::filesystem::path dir(outPath); + +// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); +// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); +// mp4Reader->addOutPutPin(h264ImageMetadata); +// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); +// mp4Reader->addOutPutPin(mp4Metadata); + +// mp4ReaderProps.skipDir = skipDir; + +// boost::filesystem::path full_path = dir / file; +// LOG_INFO << full_path; +// //std::string outFolderPath_2 = "./data/testOutput/testVids"; +// //auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); +// //mp4WriterSinkProps_2.logHealth = true; +// //mp4WriterSinkProps_2.logHealthFrequency = 10; +// //auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// //mp4Reader->setNext(mp4Writer_2); +// auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_ ????.h264"); +// auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); +// std::vector mImagePin; +// mImagePin = mp4Reader->getAllOutputPinsByType(frameType); +// mp4Reader->setNext(fileWriter, mImagePin); +// boost::shared_ptr p; +// p = boost::shared_ptr(new PipeLine("test")); +// p->appendModule(mp4Reader); + +// if (!p->init()) +// { +// throw AIPException(AIP_FATAL, "Engine Pipeline init failed. 
Check IPEngine Logs for more details."); +// } + +// mp4Reader->setProps(mp4ReaderProps); +// mp4Reader->randomSeek(seekStartTS, seekEndTS); + +// p->run_all_threaded(); + +// boost::this_thread::sleep_for(boost::chrono::seconds(10)); + +// p->stop(); +// p->term(); +// p->wait_for_all(); +// p.reset(); +// } + +// BOOST_AUTO_TEST_CASE(mp4ReadView) +// { +// LoggerProps loggerProps; +// loggerProps.logLevel = boost::log::trivial::severity_level::info; +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// Logger::initLogger(loggerProps); + +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; //1920 +// auto height = 360; //1020 + +// //WebCam pipeline +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); + +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); + +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); + +// std::string outFolderPath_1 = 
"./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// encoder->setNext(mp4Writer_1); + +// //Reader pipeline +// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; +// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; +// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; +// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; +// boost::filesystem::path file("frame_??????.h264"); +// auto frameType = FrameMetadata::FrameType::H264_DATA; +// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); + +// boost::filesystem::path dir(outPath); + +// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); +// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); +// mp4Reader->addOutPutPin(h264ImageMetadata); +// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); +// mp4Reader->addOutPutPin(mp4Metadata); + +// //mp4ReaderProps.skipDir = skipDir; + +// boost::filesystem::path full_path = dir / file; +// LOG_INFO << full_path; +// /*std::string outFolderPath_2 = "./data/testOutput/testVids"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = true; +// mp4WriterSinkProps_2.logHealthFrequency = 10; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// mp4Reader->setNext(mp4Writer_2);*/ //fileWriterModuleFrame_????.jpg +// auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_????.h264"); +// auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); +// std::vector mImagePin; +// mImagePin = 
mp4Reader->getAllOutputPinsByType(frameType); +// mp4Reader->setNext(fileWriter, mImagePin); +// //Pipeline + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); +// PipeLine p("test"); +// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); +// p.appendModule(webCam); +// p.appendModule(mp4Reader); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("Reader", mp4Reader); +// //mControl->enrollModule("Writer-2", mp4Writer_2); + +// p.init(); +// mControl->init(); +// mp4Reader->play(false); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(150)); +// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221023/0018/"))) +// { +// if (boost::filesystem::is_regular_file(folder)) +// { +// boost::filesystem::path p = folder.path(); +// changedVideoPath = p.string(); +// break; +// } +// } +// Mp4ReaderSourceProps propsChange(changedVideoPath, true); +// mp4Reader->setProps(propsChange); +// boost::this_thread::sleep_for(boost::chrono::seconds(360)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(mp4ReadWrite) +// { +// LoggerProps loggerProps; +// loggerProps.logLevel = boost::log::trivial::severity_level::info; +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// Logger::initLogger(loggerProps); + +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; //1920 +// auto height = 360; 
//1020 + +// //WebCam pipeline +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); + +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); + +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// encoder->setNext(mp4Writer_1); + +// //Reader pipeline +// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; +// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; +// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; +// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; +// boost::filesystem::path file("frame_??????.h264"); +// auto frameType = FrameMetadata::FrameType::H264_DATA; +// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); + +// 
boost::filesystem::path dir(outPath); + +// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); +// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); +// mp4Reader->addOutPutPin(h264ImageMetadata); +// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); +// mp4Reader->addOutPutPin(mp4Metadata); + +// //mp4ReaderProps.skipDir = skipDir; + +// boost::filesystem::path full_path = dir / file; +// LOG_INFO << full_path; +// std::string outFolderPath_2 = "./data/testOutput/testVids"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = true; +// mp4WriterSinkProps_2.logHealthFrequency = 10; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// mp4Reader->setNext(mp4Writer_2); //fileWriterModuleFrame_????.jpg +// //auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_????.h264"); +// //auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); +// //std::vector mImagePin; +// //mImagePin = mp4Reader->getAllOutputPinsByType(frameType); +// //mp4Reader->setNext(fileWriter, mImagePin); +// //Pipeline + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); +// PipeLine p("test"); +// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); +// p.appendModule(webCam); +// p.appendModule(mp4Reader); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("Reader", mp4Reader); +// mControl->enrollModule("Writer-2", mp4Writer_2); + +// p.init(); +// mControl->init(); +// mp4Reader->play(false); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(150)); +// for (const auto& folder : 
boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221024/0018/"))) +// { +// if (boost::filesystem::is_regular_file(folder)) +// { +// boost::filesystem::path p = folder.path(); +// changedVideoPath = p.string(); +// break; +// } +// } +// Mp4ReaderSourceProps propsChange(changedVideoPath, true); +// mp4Reader->setProps(propsChange); +// boost::this_thread::sleep_for(boost::chrono::seconds(360)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; +// inp.join(); +// } + + +// BOOST_AUTO_TEST_CASE(checkNVR4) //Use this for testing pipeline note - Mimics the actual pipeline +// { +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; //1920 +// auto height = 360; //1020 + +// //WebCam +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); +// //Color Conversion + +// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); +// webCam->setNext(colorConvtView); + +// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); +// colorConvtView->setNext(view); + +// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); +// webCam->setNext(colorConvt); //WebCam->ColorConversion + +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// auto encoder = 
boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); +// copy->setNext(encoder); + +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 10; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// encoder->setNext(mp4Writer_1); + +// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(90000, 30000, true))); +// encoder->setNext(multiQue); + +// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); +// //multiQue->setNext(fileWriter); + +// //Reader pipeline +// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; +// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; +// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; +// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; +// boost::filesystem::path file("frame_??????.h264"); +// auto frameType = FrameMetadata::FrameType::H264_DATA; +// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); + +// boost::filesystem::path dir(outPath); + +// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); +// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); +// mp4Reader->addOutPutPin(h264ImageMetadata); +// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); +// mp4Reader->addOutPutPin(mp4Metadata); + +// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = true; +// 
mp4WriterSinkProps_2.logHealthFrequency = 10; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); + +// std::string outFolderPath_3 = "./data/testOutput/mp4_videos/ExportVids/"; +// auto mp4WriterSinkProps_3 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_3); +// mp4WriterSinkProps_3.logHealth = true; +// mp4WriterSinkProps_3.logHealthFrequency = 10; +// auto mp4Writer_3 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_3)); + +// multiQue->setNext(mp4Writer_2); + + +// boost::filesystem::path full_path = dir / file; +// LOG_INFO << full_path; +// mp4Reader->setNext(mp4Writer_3); + +// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); +// PipeLine p("test"); +// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); +// p.appendModule(webCam); +// p.appendModule(mp4Reader); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Reader", mp4Reader); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("MultimediaQueue", multiQue); +// mControl->enrollModule("Writer-2", mp4Writer_2); +// mControl->enrollModule("Writer-3", mp4Writer_3); + +// p.init(); +// mControl->init(); +// mp4Reader->play(false); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(150)); +// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221030/0012/"))) +// { +// if (boost::filesystem::is_regular_file(folder)) +// { +// boost::filesystem::path p = folder.path(); +// changedVideoPath = p.string(); +// break; +// } +// } +// Mp4ReaderSourceProps propsChange(changedVideoPath, true); +// mp4Reader->setProps(propsChange); +// boost::this_thread::sleep_for(boost::chrono::seconds(600)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The 
first thread has stopped"; +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(checkNVR5) //Use this for testing pipeline note - Mimics the actual pipeline +// { +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// //Logger::initLogger(logprops); + +// auto cuContext = apracucontext_sp(new ApraCUcontext()); +// uint32_t gopLength = 25; +// uint32_t bitRateKbps = 1000; +// uint32_t frameRate = 30; +// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; +// bool enableBFrames = true; +// auto width = 640; +// auto height = 360; + +// //WebCam +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// webCamSourceprops.logHealth = true; +// webCamSourceprops.logHealthFrequency = 100; +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); + +// //Color Conversion View +// auto colorProps1 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR); +// colorProps1.logHealth = true; +// colorProps1.logHealthFrequency = 100; +// auto colorConvtView = boost::shared_ptr(new ColorConversion(colorProps1)); +// webCam->setNext(colorConvtView); + +// //ImageViewer +// ImageViewerModuleProps imgViewerProps("NVR-View"); +// imgViewerProps.logHealth = true; +// imgViewerProps.logHealthFrequency = 100; +// auto view = boost::shared_ptr(new ImageViewerModule(imgViewerProps)); +// colorConvtView->setNext(view); + +// //Color Conversion to encoder +// auto colorProps2 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR); +// colorProps2.logHealth = true; +// colorProps2.logHealthFrequency = 100; +// colorProps2.fps = 30; +// auto colorConvt = boost::shared_ptr(new ColorConversion(colorProps2)); +// webCam->setNext(colorConvt); //WebCam->ColorConversion + +// //Cuda Mem Copy +// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); +// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); +// copyProps.logHealth = true; +// 
copyProps.logHealthFrequency = 100; +// copyProps.fps = 30; +// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); +// colorConvt->setNext(copy); + +// //H264 Encoder +// auto encoderProps = H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames); +// encoderProps.logHealth = true; +// encoderProps.logHealthFrequency = 100; +// encoderProps.fps = 30; +// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(encoderProps)); +// copy->setNext(encoder); + +// auto sinkProps = ExternalSinkModuleProps(); +// sinkProps.logHealth = true; +// sinkProps.logHealthFrequency = 100; +// auto sink = boost::shared_ptr(new ExternalSinkModule(sinkProps)); + +// //MP4 Writer-1 (24/7 writer) +// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; +// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); +// mp4WriterSinkProps_1.logHealth = true; +// mp4WriterSinkProps_1.logHealthFrequency = 100; +// mp4WriterSinkProps_1.fps = 30; +// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); +// //encoder->setNext(mp4Writer_1); +// encoder->setNext(sink); + +// //MultimediaQueue +// auto multiProps = MultimediaQueueXformProps(120000, 30000, true); +// multiProps.logHealth = true; +// multiProps.logHealthFrequency = 100; +// multiProps.fps = 30; +// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(multiProps)); +// encoder->setNext(multiQue); + +// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); +// //multiQue->setNext(fileWriter); + +// //MP4 Reader [Source] +// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; +// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; +// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0011/"; +// boost::filesystem::path file("frame_??????.h264"); +// auto 
frameType = FrameMetadata::FrameType::H264_DATA; +// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); +// boost::filesystem::path dir(outPath); +// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); +// mp4ReaderProps.logHealth = true; +// mp4ReaderProps.logHealthFrequency = 100; +// mp4ReaderProps.fps = 30; +// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); +// mp4Reader->addOutPutPin(h264ImageMetadata); +// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); +// mp4Reader->addOutPutPin(mp4Metadata); + +// //MP4 Writer-2 exports +// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; +// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(60, 10, 24, outFolderPath_2); +// mp4WriterSinkProps_2.logHealth = false; +// mp4WriterSinkProps_2.logHealthFrequency = 100; +// mp4WriterSinkProps_2.fps = 30; +// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); +// multiQue->setNext(mp4Writer_2); +// boost::filesystem::path full_path = dir / file; +// LOG_INFO << full_path; +// //mp4Reader->setNext(mp4Writer_2); +// mp4Reader->setNext(sink); + +// //NVR ControlModule +// auto controlProps = NVRControlModuleProps(); +// controlProps.logHealth = true; +// controlProps.logHealthFrequency = 100; +// controlProps.fps = 30; +// auto mControl = boost::shared_ptr(new NVRControlModule(controlProps)); +// Logger::setLogLevel(boost::log::trivial::severity_level::info); + + +// PipeLine p("test"); +// std::thread inp(key_Read_func, mControl, mp4Reader); +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// p.appendModule(webCam); +// p.appendModule(mp4Reader); +// p.addControlModule(mControl); +// mControl->enrollModule("WebCamera", webCam); +// mControl->enrollModule("Reader", mp4Reader); +// mControl->enrollModule("Renderer", view); +// mControl->enrollModule("Writer-1", mp4Writer_1); +// mControl->enrollModule("MultimediaQueue", multiQue); 
+// mControl->enrollModule("Writer-2", mp4Writer_2); +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// p.init(); +// mControl->init(); +// mp4Reader->play(false); +// p.run_all_threaded(); +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// boost::this_thread::sleep_for(boost::chrono::seconds(150)); +// Logger::setLogLevel(boost::log::trivial::severity_level::info); +// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221114/0012/"))) +// { +// if (boost::filesystem::is_regular_file(folder)) +// { +// boost::filesystem::path p = folder.path(); +// changedVideoPath = p.string(); +// break; +// } +// } +// Mp4ReaderSourceProps propsChange(changedVideoPath, true); +// mp4Reader->setProps(propsChange); +// boost::this_thread::sleep_for(boost::chrono::seconds(24000)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; +// inp.join(); +// } + +// BOOST_AUTO_TEST_CASE(dummyTester) +// { +// Logger::setLogLevel(boost::log::trivial::severity_level::info); + +// //WebCam +// WebCamSourceProps webCamSourceprops(0, 640, 360); +// webCamSourceprops.logHealth = true; +// webCamSourceprops.logHealthFrequency = 100; +// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); + +// //Color Conversion View +// auto colorProps1 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR); +// colorProps1.logHealth = true; +// colorProps1.logHealthFrequency = 100; +// auto colorConvtView = boost::shared_ptr(new ColorConversion(colorProps1)); +// //webCam->setNext(colorConvtView); + +// //ImageViewer +// ImageViewerModuleProps imgViewerProps("NVR-View"); +// imgViewerProps.logHealth = true; +// imgViewerProps.logHealthFrequency = 100; +// auto view = boost::shared_ptr(new ImageViewerModule(imgViewerProps)); +// webCam->setNext(view); + +// //External Sink +// auto 
sinkProps = ExternalSinkModuleProps(); +// sinkProps.logHealth = true; +// sinkProps.logHealthFrequency = 50; +// auto sink = boost::shared_ptr(new ExternalSinkModule(sinkProps)); +// //colorConvtView->setNext(sink); + +// PipeLine p("test"); +// p.appendModule(webCam); +// p.init(); +// p.run_all_threaded(); +// boost::this_thread::sleep_for(boost::chrono::seconds(100000)); +// p.stop(); +// p.term(); +// p.wait_for_all(); +// } + + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file From 9cfc6dab6b6c52cf6372b6f696fe15467776fdc6 Mon Sep 17 00:00:00 2001 From: Venkat Date: Mon, 11 Sep 2023 11:57:54 +0530 Subject: [PATCH 02/19] Removed NVRControl Module, moving to apranvr --- base/CMakeLists.txt | 38 +- base/include/NVRControlModule.h | 60 -- base/include/PipeLine.h | 3 +- base/src/NVRControlModule.cpp | 221 ----- base/test/nvrcontrolmodule_tests.cpp | 1298 -------------------------- 5 files changed, 3 insertions(+), 1617 deletions(-) delete mode 100644 base/include/NVRControlModule.h delete mode 100644 base/src/NVRControlModule.cpp delete mode 100644 base/test/nvrcontrolmodule_tests.cpp diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt index dad089d4c..0a002c206 100755 --- a/base/CMakeLists.txt +++ b/base/CMakeLists.txt @@ -4,9 +4,7 @@ OPTION(ENABLE_LINUX "Use this switch to enable LINUX" ON) OPTION(ENABLE_CUDA "Use this switch to enable CUDA" ON) OPTION(ENABLE_ARM64 "Use this switch to enable ARM64" OFF) OPTION(ENABLE_WINDOWS "Use this switch to enable WINDOWS" OFF) -OPTION(ENABLE_NVR "Use this switch to enable ApraPipesNVR" OFF) -SET(APRA_NVR ON) set(VCPKG_INSTALL_OPTIONS "--clean-after-build") IF(ENABLE_CUDA) add_compile_definitions(APRA_CUDA_ENABLED) @@ -224,7 +222,6 @@ SET(CORE_FILES_H include/OverlayModule.h include/OrderedCacheOfFiles.h include/TestSignalGeneratorSrc.h - include/NVRControlModule.h include/AbsControlModule.h ) @@ -284,8 +281,6 @@ SET(IP_FILES src/OverlayFactory.h src/OverlayFactory.cpp src/TestSignalGeneratorSrc.cpp - 
src/NVRControlModule.cpp - #src/NVRPipeline.cpp src/AbsControlModule.cpp ) @@ -568,15 +563,10 @@ SET(UT_FILES test/mp4_dts_strategy_tests.cpp test/overlaymodule_tests.cpp test/testSignalGeneratorSrc_tests.cpp - test/nvrcontrolmodule_tests.cpp test/abscontrolmodule_tests.cpp ${ARM64_UT_FILES} ${CUDA_UT_FILES} ) -SET(NVR_UT_FILES - test/utmain.cpp - # test/nvrcontrolmodule_tests.cpp -) IF(ENABLE_LINUX) list(APPEND UT_FILES @@ -587,17 +577,13 @@ ENDIF(ENABLE_LINUX) add_executable(aprapipesut ${UT_FILES}) -#IF(APRA_NVR) -add_executable(aprapipesnvr ${NVR_UT_FILES}) -#ENDIF(APRA_NVR) + IF(ENABLE_ARM64) target_include_directories ( aprapipesut PRIVATE ${JETSON_MULTIMEDIA_LIB_INCLUDE} ${FFMPEG_ROOT} ${JPEG_INCLUDE_DIR}) - target_include_directories ( aprapipesnvr PRIVATE ${JETSON_MULTIMEDIA_LIB_INCLUDE} ${FFMPEG_ROOT} ${JPEG_INCLUDE_DIR}) ENDIF(ENABLE_ARM64) IF (ENABLE_CUDA) target_include_directories ( aprapipesut PRIVATE ${NVCODEC_INCLUDE_DIR}) - target_include_directories ( aprapipesnvr PRIVATE ${NVCODEC_INCLUDE_DIR}) ENDIF (ENABLE_CUDA) @@ -626,28 +612,6 @@ target_link_libraries(aprapipesut sfml-audio ) - target_link_libraries(aprapipesnvr - aprapipes - ${JPEG_LIBRARIES} - ${LIBMP4_LIB} - ${OPENH264_LIB} - ${Boost_LIBRARIES} - ${FFMPEG_LIBRARIES} - ${OpenCV_LIBRARIES} - ${JETSON_LIBS} - ${NVCUDAToolkit_LIBS} - ${NVCODEC_LIB} - ${NVJPEGLIB_L4T} - ${CURSES_LIBRARIES} - ZXing::Core - ZXing::ZXing - BZip2::BZip2 - ZLIB::ZLIB - liblzma::liblzma - bigint::bigint - sfml-audio - ) - IF(ENABLE_WINDOWS) file(COPY ${RUNTIME_DLLS} DESTINATION Debug/) file(COPY ${RUNTIME_DLLS} DESTINATION Release/) diff --git a/base/include/NVRControlModule.h b/base/include/NVRControlModule.h deleted file mode 100644 index 976468135..000000000 --- a/base/include/NVRControlModule.h +++ /dev/null @@ -1,60 +0,0 @@ -#pragma once -#include "Module.h" -#include "AbsControlModule.h" - -class NVRControlModuleProps : public AbsControlModuleProps -{ -public: - NVRControlModuleProps() - { - } - size_t 
getSerializeSize() - { - return ModuleProps::getSerializeSize(); - } -private: - friend class boost::serialization::access; - - template - void serialize(Archive& ar, const unsigned int version) - { - ar& boost::serialization::base_object(*this); - } -}; - -class NVRControlModule : public AbsControlModule -{ - public: - NVRControlModule(NVRControlModuleProps _props); - ~NVRControlModule(); - bool init(); - bool term(); - void setProps(NVRControlModuleProps& props); - NVRControlModuleProps getProps(); - bool validateModuleRoles(); - bool nvrRecord(bool record); - bool nvrExport(uint64_t startTime, uint64_t stopTime); - bool nvrExportView(uint64_t startTime, uint64_t stopTime); - bool nvrView(bool view); - bool isRendererPaused = false; - uint64_t pausedTS = 0; - uint64_t mp4lastWrittenTS = 0; - uint64_t firstMMQtimestamp = 0; - uint64_t lastMMQtimestamp = 0; - uint64_t givenStart = 0; - uint64_t givenStop = 0; - uint64_t mp4_2_lastWrittenTS = 0; - bool isExporting = false; - -protected: - bool validateInputPins(); - bool validateOutputPins(); - bool validateInputOutputPins(); - bool handleCommand(Command::CommandType type, frame_sp& frame); - bool handlePropsChange(frame_sp& frame); - -private: - void setMetadata(framemetadata_sp& metadata); - class Detail; - boost::shared_ptr mDetail; -}; \ No newline at end of file diff --git a/base/include/PipeLine.h b/base/include/PipeLine.h index 7fee27728..142ffe512 100755 --- a/base/include/PipeLine.h +++ b/base/include/PipeLine.h @@ -1,7 +1,8 @@ #pragma once #include #include -#include "NVRControlModule.h" +// #include "NVRControlModule.h" +#include "AbsControlModule.h" #include "enum_macros.h" #include diff --git a/base/src/NVRControlModule.cpp b/base/src/NVRControlModule.cpp deleted file mode 100644 index d2c09197e..000000000 --- a/base/src/NVRControlModule.cpp +++ /dev/null @@ -1,221 +0,0 @@ -#include -#include -#include "NVRControlModule.h" -#include "Mp4WriterSink.h" -#include "Module.h" -#include "Command.h" - -class 
NVRControlModule::Detail -{ -public: - Detail(NVRControlModuleProps& _props) : mProps(_props) - { - } - - ~Detail() - { - } - void setProps(NVRControlModuleProps _props) - { - mProps = _props; - } - NVRControlModuleProps mProps; -}; - - -NVRControlModule::NVRControlModule(NVRControlModuleProps _props) - :AbsControlModule(_props) -{ - mDetail.reset(new Detail(_props)); -} - -NVRControlModule::~NVRControlModule() {} - -bool NVRControlModule::validateInputPins() -{ - return true; -} - -bool NVRControlModule::validateOutputPins() -{ - return true; -} - -bool NVRControlModule::validateInputOutputPins() -{ - return true; -} - -bool NVRControlModule::handleCommand(Command::CommandType type, frame_sp& frame) -{ - - if (type == Command::CommandType::NVRCommandView) - { - NVRCommandView cmd; - getCommand(cmd, frame); - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("Renderer_1")) // Logic for detecting modules to add - { - auto myId = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(cmd); - } - } - // if(cmd.doView == false) - // { - // LOG_ERROR<<" PAUSED COMMAND SENT TO MMQ - paused start is !!"<getId(); - // pipelineModules[i]->queueCommand(cmd); - // } - // } - - // } - return true; - } - if (type == Command::CommandType::NVRCommandExportView) - { - NVRCommandExportView cmd; - getCommand(cmd, frame); - givenStart = cmd.startViewTS; - givenStop = cmd.stopViewTS; - if(pausedTS < firstMMQtimestamp) - { - LOG_ERROR<<" The seeked start time is in disk!!"; - Mp4SeekCommand command; - command.seekStartTS = pausedTS; - command.forceReopen = false; - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("Reader_1")) // Sending command to reader - { - auto myId = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(command); - pipelineModules[i]->play(true); - return true; - } - } - } - else - { - LOG_ERROR<<" The seeked start time is in MULTIMEDIA-QUEUE!!"; - 
MultimediaQueueXformCommand cmd; - cmd.startTime = pausedTS; - cmd.endTime = 1694340826000; - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue - { - auto myid = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(cmd); - } - } - } - - return true; - - } - - if (type == Command::CommandType::MMQtimestamps) - { - MMQtimestamps cmd; - getCommand(cmd, frame); - firstMMQtimestamp = cmd.firstTimeStamp; - lastMMQtimestamp = cmd.lastTimeStamp; - return true; - } - - return Module::handleCommand(type, frame); -} - -bool NVRControlModule::handlePropsChange(frame_sp& frame) -{ - NVRControlModuleProps props(mDetail->mProps); - auto ret = Module::handlePropsChange(frame, props); - mDetail->setProps(props); - return ret; -} - -bool NVRControlModule::init() -{ - if (!Module::init()) - { - return false; - } - return true; -} - -bool NVRControlModule::term() -{ - return Module::term(); -} - -NVRControlModuleProps NVRControlModule::getProps() -{ - fillProps(mDetail->mProps); - return mDetail->mProps; -} - -void NVRControlModule::setProps(NVRControlModuleProps& props) -{ - Module::addPropsToQueue(props); -} - -bool NVRControlModule::validateModuleRoles() -{ - for (int i = 0; i < pipelineModules.size(); i++) - { - bool modPresent = false; - for (auto it = moduleRoles.begin(); it != moduleRoles.end(); it++) - { - if (pipelineModules[i] == it->second) - { - modPresent = true; - } - } - if (!modPresent) - { - LOG_ERROR << "Modules and roles validation failed!!"; - } - } - return true; -} - -bool NVRControlModule::nvrRecord(bool record) -{ - NVRCommandRecord cmd; - cmd.doRecording = record; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrExport(uint64_t ts, uint64_t te) -{ - NVRCommandExport cmd; - cmd.startExportTS = ts; - cmd.stopExportTS = te; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrExportView(uint64_t ts, uint64_t te) -{ - 
NVRCommandExportView cmd; - cmd.startViewTS = ts; - cmd.stopViewTS = te; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrView(bool view) -{ - NVRCommandView cmd; - cmd.doView = view; - return queueCommand(cmd); -} \ No newline at end of file diff --git a/base/test/nvrcontrolmodule_tests.cpp b/base/test/nvrcontrolmodule_tests.cpp deleted file mode 100644 index 7c243c803..000000000 --- a/base/test/nvrcontrolmodule_tests.cpp +++ /dev/null @@ -1,1298 +0,0 @@ -#include -#include -#include -#include -#include -#include -//#include "NVRPipeline.h" -#include "PipeLine.h" -#include "ExternalSinkModule.h" -#include "EncodedImageMetadata.h" -#include "FrameContainerQueue.h" -#include "Module.h" -#include "Utils.h" -#include "NVRControlModule.h" -#include "WebCamSource.h" -#include "H264EncoderNVCodec.h" -#include "Mp4WriterSink.h" -#include "CudaMemCopy.h" -#include "CudaStreamSynchronize.h" -#include "ColorConversionXForm.h" -#include "FileReaderModule.h" -#include "ImageViewerModule.h" -#include "KeyboardListener.h" -#include "MultimediaQueueXform.h" -#include "H264Metadata.h" -#include "ValveModule.h" -#include "Mp4ReaderSource.h" -#include "Mp4VideoMetadata.h" -#include "FileWriterModule.h" -#include "NvV4L2Camera.h" //Jetson -#include "NvTransform.h" -#include "EglRenderer.h" - -BOOST_AUTO_TEST_SUITE(nvrcontrolmodule_tests) - - -// struct CheckThread { -// class SourceModuleProps : public ModuleProps -// { -// public: -// SourceModuleProps() : ModuleProps() -// {}; -// }; -// class TransformModuleProps : public ModuleProps -// { -// public: -// TransformModuleProps() : ModuleProps() -// {}; -// }; -// class SinkModuleProps : public ModuleProps -// { -// public: -// SinkModuleProps() : ModuleProps() -// {}; -// }; - -// class SourceModule : public Module -// { -// public: -// SourceModule(SourceModuleProps props) : Module(SOURCE, "sourceModule", props) -// { -// }; - -// protected: -// bool process() { return false; } -// bool validateOutputPins() -// { -// 
return true; -// } -// bool validateInputPins() -// { -// return true; -// } -// }; -// class TransformModule : public Module -// { -// public: -// TransformModule(TransformModuleProps props) :Module(TRANSFORM, "transformModule", props) {}; -// protected: -// bool process() { return false; } -// bool validateOutputPins() -// { -// return true; -// } -// bool validateInputPins() -// { -// return true; -// } -// }; -// class SinkModule : public Module -// { -// public: -// SinkModule(SinkModuleProps props) :Module(SINK, "mp4WritersinkModule", props) {}; -// protected: -// bool process() { return false; } -// bool validateOutputPins() -// { -// return true; -// } -// bool validateInputPins() -// { -// return true; -// } -// }; -// }; - -// void key_func(boost::shared_ptr& mControl) -// { - -// while (true) { -// int k; -// k = getchar(); -// if (k == 97) -// { -// BOOST_LOG_TRIVIAL(info) << "Starting Render!!"; -// mControl->nvrView(true); -// mControl->step(); -// } -// if (k == 100) -// { -// BOOST_LOG_TRIVIAL(info) << "Stopping Render!!"; -// mControl->nvrView(false); -// mControl->step(); -// } -// if (k == 101) -// { -// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); -// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); -// uint64_t seekStartTS = now - 180000; -// uint64_t seekEndTS = now + 120000; -// mControl->nvrExport(seekStartTS, seekEndTS); -// mControl->step(); -// } -// if (k == 114) -// { -// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; -// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); -// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); -// uint64_t seekStartTS = now - 5000; -// uint64_t seekEndTS = now; -// mControl->nvrExport(seekStartTS, seekEndTS); -// mControl->step(); -// } -// else -// { -// BOOST_LOG_TRIVIAL(info) << "The value pressed is .."<< k; -// } -// } -// } - 
-// void key_Read_func(boost::shared_ptr& mControl, boost::shared_ptr& mp4Reader) -// { - -// while (true) { -// int k; -// k = getchar(); -// if (k == 97) -// { -// BOOST_LOG_TRIVIAL(info) << "Starting Render!!"; -// mControl->nvrView(true); -// mControl->step(); -// } -// if (k == 100) -// { -// BOOST_LOG_TRIVIAL(info) << "Stopping Render!!"; -// mControl->nvrView(false); -// mControl->step(); -// } -// if (k == 101) -// { -// /*uint64_t x, y; -// cout << "Enter start time of Export : "; -// cin >> x; -// cout << "Enter end time of Export : "; -// cin >> y; -// cout << "Start time is " << x << " End time is " << y;*/ -// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; -// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); -// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); -// uint64_t seekStartTS = now - 180000; -// uint64_t seekEndTS = now + 120000; -// mControl->nvrExport(seekStartTS, seekEndTS); -// mControl->step(); -// } -// if (k == 114) -// { -// BOOST_LOG_TRIVIAL(info) << "Starting Reading from disk!!"; -// boost::posix_time::ptime const time_epoch(boost::gregorian::date(1970, 1, 1)); -// auto now = (boost::posix_time::microsec_clock::universal_time() - time_epoch).total_milliseconds(); -// uint64_t seekStartTS = now + 30000; -// uint64_t seekEndTS = now + 60000; -// mControl->nvrExport(seekStartTS, seekEndTS); -// mControl->step(); -// mp4Reader->play(true); -// } -// if (k == 112) -// { -// BOOST_LOG_TRIVIAL(info) << "Stopping Pipeline Input"; -// } - -// else -// { -// BOOST_LOG_TRIVIAL(info) << "The value pressed is .." 
<< k; -// } -// } -// } - -// BOOST_AUTO_TEST_CASE(basic) -// { -// CheckThread f; - -// auto m1 = boost::shared_ptr(new CheckThread::SourceModule(CheckThread::SourceModuleProps())); -// auto metadata1 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); -// m1->addOutputPin(metadata1); -// auto m2 = boost::shared_ptr(new CheckThread::TransformModule(CheckThread::TransformModuleProps())); -// m1->setNext(m2); -// auto metadata2 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); -// m2->addOutputPin(metadata2); -// auto m3 = boost::shared_ptr(new CheckThread::TransformModule(CheckThread::TransformModuleProps())); -// m2->setNext(m3); -// auto metadata3 = framemetadata_sp(new FrameMetadata(FrameMetadata::ENCODED_IMAGE)); -// m3->addOutputPin(metadata3); -// auto m4 = boost::shared_ptr(new CheckThread::SinkModule(CheckThread::SinkModuleProps())); -// m3->setNext(m4); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// // add all source modules -// p.appendModule(m1); -// // add control module if any -// p.addControlModule(mControl); -// mControl->enrollModule("source", m1); -// mControl->enrollModule("transform_1", m2); -// mControl->enrollModule("writer", m3); -// mControl->enrollModule("sink", m4); -// // init -// p.init(); -// // control init - do inside pipeline init -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(10)); -// mControl->nvrView(false); -// // dont need step in run_all_threaded -// mControl->step(); - -// boost::this_thread::sleep_for(boost::chrono::seconds(10)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// } - -// BOOST_AUTO_TEST_CASE(checkNVR) -// { -// auto nvrPipe = boost::shared_ptr(new NVRPipeline()); -// nvrPipe->open(); -// //nvrPipe->startRecording(); -// nvrPipe->close(); -// } - -// BOOST_AUTO_TEST_CASE(NVRTest) -// { -// auto cuContext = apracucontext_sp(new ApraCUcontext()); 
-// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; -// auto height = 360; - -// // test with 0 - with multiple cameras - -// WebCamSourceProps webCamSourceprops(0, 1920, 1080); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); -// std::string outFolderPath = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps = Mp4WriterSinkProps(1, 1, 24, outFolderPath); -// mp4WriterSinkProps.logHealth = true; -// mp4WriterSinkProps.logHealthFrequency = 10; -// auto mp4Writer = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps)); -// encoder->setNext(mp4Writer); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// p.appendModule(webCam); -// // add control module if any -// p.addControlModule(mControl); -// mControl->enrollModule("source", webCam); -// mControl->enrollModule("colorConversion", colorConvt); -// mControl->enrollModule("cudaCopy", copy); -// mControl->enrollModule("encoder", encoder); -// mControl->enrollModule("Writer-1", mp4Writer); -// // init -// p.init(); -// // control init - do inside pipeline init -// mControl->init(); -// p.run_all_threaded(); -// //mControl->nvrRecord(true); -// // 
dont need step in run_all_threaded -// mControl->step(); - -// boost::this_thread::sleep_for(boost::chrono::seconds(240)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// } - -// BOOST_AUTO_TEST_CASE(NVRView) -// { -// WebCamSourceProps webCamSourceprops(0, 1920, 1080); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto multique = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// webCam->setNext(multique); -// multique->setNext(view); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// p.appendModule(webCam); -// boost::thread inp(key_func, mControl); -// p.addControlModule(mControl); -// mControl->enrollModule("filereader", webCam); -// mControl->enrollModule("viewer", view); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(30)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(NVRViewKey) -// { -// WebCamSourceProps webCamSourceprops(0, 1920, 1080); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// webCam->setNext(view); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// std::thread inp(key_func, std::ref(mControl)); -// p.appendModule(webCam); -// p.addControlModule(mControl); -// mControl->enrollModule("filereader", webCam); -// mControl->enrollModule("viewer", view); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(100)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// inp.join(); -// } - - -// BOOST_AUTO_TEST_CASE(NVRFile) -// { -// 
std::string inFolderPath = "./data/Raw_YUV420_640x360"; -// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); -// fileReaderProps.fps = 20; -// fileReaderProps.readLoop = true; -// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // -// auto metadata = framemetadata_sp(new RawImageMetadata(640, 360, ImageMetadata::ImageType::MONO, CV_8UC1, 0, CV_8U, FrameMetadata::HOST, true)); -// auto pinId = fileReader->addOutputPin(metadata); -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// fileReader->setNext(view); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// p.appendModule(fileReader); -// p.addControlModule(mControl); -// mControl->enrollModule("filereader", fileReader); -// mControl->enrollModule("viewer", view); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); - -// boost::this_thread::sleep_for(boost::chrono::seconds(15)); -// mControl->nvrView(false); -// mControl->step(); -// boost::this_thread::sleep_for(boost::chrono::seconds(10)); -// mControl->nvrView(true); -// mControl->step(); -// boost::this_thread::sleep_for(boost::chrono::seconds(15)); - -// p.stop(); -// p.term(); -// p.wait_for_all(); -// } - -// BOOST_AUTO_TEST_CASE(NVRkey) -// { -// std::string inFolderPath = "./data/h264_data"; -// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); -// fileReaderProps.fps = 20; -// fileReaderProps.readLoop = true; -// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // -// auto encodedImageMetadata = framemetadata_sp(new H264Metadata(704, 576)); -// auto pinId = fileReader->addOutputPin(encodedImageMetadata); -// //auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); 
-// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// fileReader->setNext(mp4Writer_1); -// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = true; -// mp4WriterSinkProps_2.logHealthFrequency = 10; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// //fileReader->setNext(mp4Writer_2); -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// //std::thread inp(key_func, mControl); -// p.appendModule(fileReader); -// p.addControlModule(mControl); -// mControl->enrollModule("filereader", fileReader); -// mControl->enrollModule("writer", mp4Writer_1); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(10)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// //inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(NVR_mmq) -// { -// std::string inFolderPath = "./data/h264_data"; -// auto fileReaderProps = FileReaderModuleProps(inFolderPath, 0, -1); -// fileReaderProps.fps = 20; -// fileReaderProps.readLoop = true; -// auto fileReader = boost::shared_ptr(new FileReaderModule(fileReaderProps)); // -// auto encodedImageMetadata = framemetadata_sp(new H264Metadata(704, 576)); -// auto pinId = fileReader->addOutputPin(encodedImageMetadata); - -// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(30000, 5000, true))); -// fileReader->setNext(multiQueue); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = 
boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// multiQueue->setNext(mp4Writer_1); - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// std::thread inp(key_func, std::ref(mControl)); -// p.appendModule(fileReader); -// p.addControlModule(mControl); -// mControl->enrollModule("filereader", fileReader); -// mControl->enrollModule("multimediaQueue", multiQueue); -// mControl->enrollModule("writer", mp4Writer_1); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(50)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(NVR_mmq_view) -// { -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 1920; -// auto height = 1020; - - -// WebCamSourceProps webCamSourceprops(0, 1920, 1080); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); - -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); - -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// auto encoder = boost::shared_ptr(new 
H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); - - -// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(30000, 5000, true))); -// encoder->setNext(multiQueue); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 1, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// multiQueue->setNext(mp4Writer_1); - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// std::thread inp(key_func, std::ref(mControl)); -// p.appendModule(webCam); -// p.addControlModule(mControl); -// mControl->enrollModule("webcamera", webCam); -// mControl->enrollModule("multimediaQueue", multiQueue); -// mControl->enrollModule("writer", mp4Writer_1); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(60)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(checkNVR2) //Use this for testing pipeline note - Only one mp4Writer is present in this pipeline -// { -// LoggerProps loggerProps; -// loggerProps.logLevel = boost::log::trivial::severity_level::info; -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// Logger::initLogger(loggerProps); - -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; -// auto height = 360; - - -// WebCamSourceProps webCamSourceprops(0, 1920, 1080); -// auto webCam = boost::shared_ptr(new 
WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); -// H264EncoderNVCodecProps encProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames); -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(encProps)); -// copy->setNext(encoder); - -// auto multiQueue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); -// encoder->setNext(multiQueue); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// multiQueue->setNext(mp4Writer_1); - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); - -// PipeLine p("test"); -// std::thread inp(key_func, std::ref(mControl)); -// p.appendModule(webCam); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("MultimediaQueue", multiQueue); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// 
boost::this_thread::sleep_for(boost::chrono::seconds(360)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(checkNVR3) //Use this for testing pipeline note - Mimics the actual pipeline -// { -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; //1920 -// auto height = 360; //1020 - - -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); - -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); - -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new 
Mp4WriterSink(mp4WriterSinkProps_1)); -// encoder->setNext(mp4Writer_1); - -// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(10000, 5000, true))); -// encoder->setNext(multiQue); -// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = true; -// mp4WriterSinkProps_2.logHealthFrequency = 10; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// multiQue->setNext(mp4Writer_2); - -// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); -// //multiQue->setNext(fileWriter); - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); -// PipeLine p("test"); -// std::thread inp(key_func, std::ref(mControl)); -// p.appendModule(webCam); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("MultimediaQueue", multiQue); -// mControl->enrollModule("Writer-2", mp4Writer_2); - -// p.init(); -// mControl->init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(360)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(mp4Read) -// { -// LoggerProps loggerProps; -// loggerProps.logLevel = boost::log::trivial::severity_level::info; -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// Logger::initLogger(loggerProps); - -// std::string skipDir = "./data/testOutput/mp4_videos/24bpp"; -// std::string startingVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0013/1669188939867.mp4"; -// std::string outPath = "data/testOutput/outFrames"; -// 
uint64_t seekStartTS = 1669119595641; -// uint64_t seekEndTS = 1669119595641 + 10000; -// boost::filesystem::path file("frame_??????.h264"); -// auto frameType = FrameMetadata::FrameType::H264_DATA; -// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); - -// boost::filesystem::path dir(outPath); - -// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); -// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); -// mp4Reader->addOutPutPin(h264ImageMetadata); -// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); -// mp4Reader->addOutPutPin(mp4Metadata); - -// mp4ReaderProps.skipDir = skipDir; - -// boost::filesystem::path full_path = dir / file; -// LOG_INFO << full_path; -// //std::string outFolderPath_2 = "./data/testOutput/testVids"; -// //auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// //mp4WriterSinkProps_2.logHealth = true; -// //mp4WriterSinkProps_2.logHealthFrequency = 10; -// //auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// //mp4Reader->setNext(mp4Writer_2); -// auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_ ????.h264"); -// auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); -// std::vector mImagePin; -// mImagePin = mp4Reader->getAllOutputPinsByType(frameType); -// mp4Reader->setNext(fileWriter, mImagePin); -// boost::shared_ptr p; -// p = boost::shared_ptr(new PipeLine("test")); -// p->appendModule(mp4Reader); - -// if (!p->init()) -// { -// throw AIPException(AIP_FATAL, "Engine Pipeline init failed. 
Check IPEngine Logs for more details."); -// } - -// mp4Reader->setProps(mp4ReaderProps); -// mp4Reader->randomSeek(seekStartTS, seekEndTS); - -// p->run_all_threaded(); - -// boost::this_thread::sleep_for(boost::chrono::seconds(10)); - -// p->stop(); -// p->term(); -// p->wait_for_all(); -// p.reset(); -// } - -// BOOST_AUTO_TEST_CASE(mp4ReadView) -// { -// LoggerProps loggerProps; -// loggerProps.logLevel = boost::log::trivial::severity_level::info; -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// Logger::initLogger(loggerProps); - -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; //1920 -// auto height = 360; //1020 - -// //WebCam pipeline -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); - -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); - -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); - -// std::string outFolderPath_1 = 
"./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// encoder->setNext(mp4Writer_1); - -// //Reader pipeline -// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; -// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; -// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; -// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; -// boost::filesystem::path file("frame_??????.h264"); -// auto frameType = FrameMetadata::FrameType::H264_DATA; -// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); - -// boost::filesystem::path dir(outPath); - -// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); -// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); -// mp4Reader->addOutPutPin(h264ImageMetadata); -// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); -// mp4Reader->addOutPutPin(mp4Metadata); - -// //mp4ReaderProps.skipDir = skipDir; - -// boost::filesystem::path full_path = dir / file; -// LOG_INFO << full_path; -// /*std::string outFolderPath_2 = "./data/testOutput/testVids"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = true; -// mp4WriterSinkProps_2.logHealthFrequency = 10; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// mp4Reader->setNext(mp4Writer_2);*/ //fileWriterModuleFrame_????.jpg -// auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_????.h264"); -// auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); -// std::vector mImagePin; -// mImagePin = 
mp4Reader->getAllOutputPinsByType(frameType); -// mp4Reader->setNext(fileWriter, mImagePin); -// //Pipeline - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); -// PipeLine p("test"); -// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); -// p.appendModule(webCam); -// p.appendModule(mp4Reader); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("Reader", mp4Reader); -// //mControl->enrollModule("Writer-2", mp4Writer_2); - -// p.init(); -// mControl->init(); -// mp4Reader->play(false); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(150)); -// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221023/0018/"))) -// { -// if (boost::filesystem::is_regular_file(folder)) -// { -// boost::filesystem::path p = folder.path(); -// changedVideoPath = p.string(); -// break; -// } -// } -// Mp4ReaderSourceProps propsChange(changedVideoPath, true); -// mp4Reader->setProps(propsChange); -// boost::this_thread::sleep_for(boost::chrono::seconds(360)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(mp4ReadWrite) -// { -// LoggerProps loggerProps; -// loggerProps.logLevel = boost::log::trivial::severity_level::info; -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// Logger::initLogger(loggerProps); - -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; //1920 -// auto height = 360; 
//1020 - -// //WebCam pipeline -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); - -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); - -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// encoder->setNext(mp4Writer_1); - -// //Reader pipeline -// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; -// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; -// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; -// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; -// boost::filesystem::path file("frame_??????.h264"); -// auto frameType = FrameMetadata::FrameType::H264_DATA; -// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); - -// 
boost::filesystem::path dir(outPath); - -// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); -// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); -// mp4Reader->addOutPutPin(h264ImageMetadata); -// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); -// mp4Reader->addOutPutPin(mp4Metadata); - -// //mp4ReaderProps.skipDir = skipDir; - -// boost::filesystem::path full_path = dir / file; -// LOG_INFO << full_path; -// std::string outFolderPath_2 = "./data/testOutput/testVids"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = true; -// mp4WriterSinkProps_2.logHealthFrequency = 10; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// mp4Reader->setNext(mp4Writer_2); //fileWriterModuleFrame_????.jpg -// //auto fileWriterProps = FileWriterModuleProps("./data/testOutput/mp4WriterModuleFrame_????.h264"); -// //auto fileWriter = boost::shared_ptr(new FileWriterModule(fileWriterProps)); -// //std::vector mImagePin; -// //mImagePin = mp4Reader->getAllOutputPinsByType(frameType); -// //mp4Reader->setNext(fileWriter, mImagePin); -// //Pipeline - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); -// PipeLine p("test"); -// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); -// p.appendModule(webCam); -// p.appendModule(mp4Reader); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("Reader", mp4Reader); -// mControl->enrollModule("Writer-2", mp4Writer_2); - -// p.init(); -// mControl->init(); -// mp4Reader->play(false); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(150)); -// for (const auto& folder : 
boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221024/0018/"))) -// { -// if (boost::filesystem::is_regular_file(folder)) -// { -// boost::filesystem::path p = folder.path(); -// changedVideoPath = p.string(); -// break; -// } -// } -// Mp4ReaderSourceProps propsChange(changedVideoPath, true); -// mp4Reader->setProps(propsChange); -// boost::this_thread::sleep_for(boost::chrono::seconds(360)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; -// inp.join(); -// } - - -// BOOST_AUTO_TEST_CASE(checkNVR4) //Use this for testing pipeline note - Mimics the actual pipeline -// { -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; //1920 -// auto height = 360; //1020 - -// //WebCam -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); -// //Color Conversion - -// auto colorConvtView = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR))); -// webCam->setNext(colorConvtView); - -// auto view = boost::shared_ptr(new ImageViewerModule(ImageViewerModuleProps("NVR-View"))); -// colorConvtView->setNext(view); - -// auto colorConvt = boost::shared_ptr(new ColorConversion(ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR))); -// webCam->setNext(colorConvt); //WebCam->ColorConversion - -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// auto encoder = 
boost::shared_ptr(new H264EncoderNVCodec(H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames))); -// copy->setNext(encoder); - -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 10; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// encoder->setNext(mp4Writer_1); - -// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(MultimediaQueueXformProps(90000, 30000, true))); -// encoder->setNext(multiQue); - -// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); -// //multiQue->setNext(fileWriter); - -// //Reader pipeline -// //std::string skipDir = "./data/Mp4_videos/h264_video_metadata/"; -// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; -// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; -// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0017/"; -// boost::filesystem::path file("frame_??????.h264"); -// auto frameType = FrameMetadata::FrameType::H264_DATA; -// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); - -// boost::filesystem::path dir(outPath); - -// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); -// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); -// mp4Reader->addOutPutPin(h264ImageMetadata); -// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); -// mp4Reader->addOutPutPin(mp4Metadata); - -// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = true; -// 
mp4WriterSinkProps_2.logHealthFrequency = 10; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); - -// std::string outFolderPath_3 = "./data/testOutput/mp4_videos/ExportVids/"; -// auto mp4WriterSinkProps_3 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_3.logHealth = true; -// mp4WriterSinkProps_3.logHealthFrequency = 10; -// auto mp4Writer_3 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); - -// multiQue->setNext(mp4Writer_2); - - -// boost::filesystem::path full_path = dir / file; -// LOG_INFO << full_path; -// mp4Reader->setNext(mp4Writer_3); - -// auto mControl = boost::shared_ptr(new NVRControlModule(NVRControlModuleProps())); -// PipeLine p("test"); -// std::thread inp(key_Read_func,std::ref(mControl), std::ref(mp4Reader)); -// p.appendModule(webCam); -// p.appendModule(mp4Reader); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Reader", mp4Reader); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("MultimediaQueue", multiQue); -// mControl->enrollModule("Writer-2", mp4Writer_2); -// mControl->enrollModule("Writer-3", mp4Writer_3); - -// p.init(); -// mControl->init(); -// mp4Reader->play(false); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(150)); -// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221030/0012/"))) -// { -// if (boost::filesystem::is_regular_file(folder)) -// { -// boost::filesystem::path p = folder.path(); -// changedVideoPath = p.string(); -// break; -// } -// } -// Mp4ReaderSourceProps propsChange(changedVideoPath, true); -// mp4Reader->setProps(propsChange); -// boost::this_thread::sleep_for(boost::chrono::seconds(600)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The 
first thread has stopped"; -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(checkNVR5) //Use this for testing pipeline note - Mimics the actual pipeline -// { -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// //Logger::initLogger(logprops); - -// auto cuContext = apracucontext_sp(new ApraCUcontext()); -// uint32_t gopLength = 25; -// uint32_t bitRateKbps = 1000; -// uint32_t frameRate = 30; -// H264EncoderNVCodecProps::H264CodecProfile profile = H264EncoderNVCodecProps::MAIN; -// bool enableBFrames = true; -// auto width = 640; -// auto height = 360; - -// //WebCam -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// webCamSourceprops.logHealth = true; -// webCamSourceprops.logHealthFrequency = 100; -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); - -// //Color Conversion View -// auto colorProps1 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR); -// colorProps1.logHealth = true; -// colorProps1.logHealthFrequency = 100; -// auto colorConvtView = boost::shared_ptr(new ColorConversion(colorProps1)); -// webCam->setNext(colorConvtView); - -// //ImageViewer -// ImageViewerModuleProps imgViewerProps("NVR-View"); -// imgViewerProps.logHealth = true; -// imgViewerProps.logHealthFrequency = 100; -// auto view = boost::shared_ptr(new ImageViewerModule(imgViewerProps)); -// colorConvtView->setNext(view); - -// //Color Conversion to encoder -// auto colorProps2 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_YUV420PLANAR); -// colorProps2.logHealth = true; -// colorProps2.logHealthFrequency = 100; -// colorProps2.fps = 30; -// auto colorConvt = boost::shared_ptr(new ColorConversion(colorProps2)); -// webCam->setNext(colorConvt); //WebCam->ColorConversion - -// //Cuda Mem Copy -// cudastream_sp cudaStream_ = boost::shared_ptr(new ApraCudaStream()); -// auto copyProps = CudaMemCopyProps(cudaMemcpyHostToDevice, cudaStream_); -// copyProps.logHealth = true; -// 
copyProps.logHealthFrequency = 100; -// copyProps.fps = 30; -// auto copy = boost::shared_ptr(new CudaMemCopy(copyProps)); -// colorConvt->setNext(copy); - -// //H264 Encoder -// auto encoderProps = H264EncoderNVCodecProps(bitRateKbps, cuContext, gopLength, frameRate, profile, enableBFrames); -// encoderProps.logHealth = true; -// encoderProps.logHealthFrequency = 100; -// encoderProps.fps = 30; -// auto encoder = boost::shared_ptr(new H264EncoderNVCodec(encoderProps)); -// copy->setNext(encoder); - -// auto sinkProps = ExternalSinkModuleProps(); -// sinkProps.logHealth = true; -// sinkProps.logHealthFrequency = 100; -// auto sink = boost::shared_ptr(new ExternalSinkModule(sinkProps)); - -// //MP4 Writer-1 (24/7 writer) -// std::string outFolderPath_1 = "./data/testOutput/mp4_videos/24bpp/"; -// auto mp4WriterSinkProps_1 = Mp4WriterSinkProps(1, 10, 24, outFolderPath_1); -// mp4WriterSinkProps_1.logHealth = true; -// mp4WriterSinkProps_1.logHealthFrequency = 100; -// mp4WriterSinkProps_1.fps = 30; -// auto mp4Writer_1 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_1)); -// //encoder->setNext(mp4Writer_1); -// encoder->setNext(sink); - -// //MultimediaQueue -// auto multiProps = MultimediaQueueXformProps(120000, 30000, true); -// multiProps.logHealth = true; -// multiProps.logHealthFrequency = 100; -// multiProps.fps = 30; -// auto multiQue = boost::shared_ptr(new MultimediaQueueXform(multiProps)); -// encoder->setNext(multiQue); - -// //auto fileWriter = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps("./data/testOutput/h264images/Raw_YUV420_640x360????.h264"))); -// //multiQue->setNext(fileWriter); - -// //MP4 Reader [Source] -// std::string startingVideoPath = "./data/Mp4_videos/h264_video/20221010/0012/1668064027062.mp4"; -// std::string outPath = "./data/testOutput/mp4_videos/24bpp"; -// std::string changedVideoPath = "./data/testOutput/mp4_videos/24bpp/20221023/0011/"; -// boost::filesystem::path file("frame_??????.h264"); -// auto 
frameType = FrameMetadata::FrameType::H264_DATA; -// auto h264ImageMetadata = framemetadata_sp(new H264Metadata(0, 0)); -// boost::filesystem::path dir(outPath); -// auto mp4ReaderProps = Mp4ReaderSourceProps(startingVideoPath, false, true); -// mp4ReaderProps.logHealth = true; -// mp4ReaderProps.logHealthFrequency = 100; -// mp4ReaderProps.fps = 30; -// auto mp4Reader = boost::shared_ptr(new Mp4ReaderSource(mp4ReaderProps)); -// mp4Reader->addOutPutPin(h264ImageMetadata); -// auto mp4Metadata = framemetadata_sp(new Mp4VideoMetadata("v_1")); -// mp4Reader->addOutPutPin(mp4Metadata); - -// //MP4 Writer-2 exports -// std::string outFolderPath_2 = "./data/testOutput/mp4_videos/ExportVids/"; -// auto mp4WriterSinkProps_2 = Mp4WriterSinkProps(60, 10, 24, outFolderPath_2); -// mp4WriterSinkProps_2.logHealth = false; -// mp4WriterSinkProps_2.logHealthFrequency = 100; -// mp4WriterSinkProps_2.fps = 30; -// auto mp4Writer_2 = boost::shared_ptr(new Mp4WriterSink(mp4WriterSinkProps_2)); -// multiQue->setNext(mp4Writer_2); -// boost::filesystem::path full_path = dir / file; -// LOG_INFO << full_path; -// //mp4Reader->setNext(mp4Writer_2); -// mp4Reader->setNext(sink); - -// //NVR ControlModule -// auto controlProps = NVRControlModuleProps(); -// controlProps.logHealth = true; -// controlProps.logHealthFrequency = 100; -// controlProps.fps = 30; -// auto mControl = boost::shared_ptr(new NVRControlModule(controlProps)); -// Logger::setLogLevel(boost::log::trivial::severity_level::info); - - -// PipeLine p("test"); -// std::thread inp(key_Read_func, mControl, mp4Reader); -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// p.appendModule(webCam); -// p.appendModule(mp4Reader); -// p.addControlModule(mControl); -// mControl->enrollModule("WebCamera", webCam); -// mControl->enrollModule("Reader", mp4Reader); -// mControl->enrollModule("Renderer", view); -// mControl->enrollModule("Writer-1", mp4Writer_1); -// mControl->enrollModule("MultimediaQueue", multiQue); 
-// mControl->enrollModule("Writer-2", mp4Writer_2); -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// p.init(); -// mControl->init(); -// mp4Reader->play(false); -// p.run_all_threaded(); -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// boost::this_thread::sleep_for(boost::chrono::seconds(150)); -// Logger::setLogLevel(boost::log::trivial::severity_level::info); -// for (const auto& folder : boost::filesystem::recursive_directory_iterator(boost::filesystem::path("./data/testOutput/mp4_videos/24bpp/20221114/0012/"))) -// { -// if (boost::filesystem::is_regular_file(folder)) -// { -// boost::filesystem::path p = folder.path(); -// changedVideoPath = p.string(); -// break; -// } -// } -// Mp4ReaderSourceProps propsChange(changedVideoPath, true); -// mp4Reader->setProps(propsChange); -// boost::this_thread::sleep_for(boost::chrono::seconds(24000)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// BOOST_LOG_TRIVIAL(info) << "The first thread has stopped"; -// inp.join(); -// } - -// BOOST_AUTO_TEST_CASE(dummyTester) -// { -// Logger::setLogLevel(boost::log::trivial::severity_level::info); - -// //WebCam -// WebCamSourceProps webCamSourceprops(0, 640, 360); -// webCamSourceprops.logHealth = true; -// webCamSourceprops.logHealthFrequency = 100; -// auto webCam = boost::shared_ptr(new WebCamSource(webCamSourceprops)); - -// //Color Conversion View -// auto colorProps1 = ColorConversionProps(ColorConversionProps::ConversionType::RGB_TO_BGR); -// colorProps1.logHealth = true; -// colorProps1.logHealthFrequency = 100; -// auto colorConvtView = boost::shared_ptr(new ColorConversion(colorProps1)); -// //webCam->setNext(colorConvtView); - -// //ImageViewer -// ImageViewerModuleProps imgViewerProps("NVR-View"); -// imgViewerProps.logHealth = true; -// imgViewerProps.logHealthFrequency = 100; -// auto view = boost::shared_ptr(new ImageViewerModule(imgViewerProps)); -// webCam->setNext(view); - -// //External Sink -// auto 
sinkProps = ExternalSinkModuleProps(); -// sinkProps.logHealth = true; -// sinkProps.logHealthFrequency = 50; -// auto sink = boost::shared_ptr(new ExternalSinkModule(sinkProps)); -// //colorConvtView->setNext(sink); - -// PipeLine p("test"); -// p.appendModule(webCam); -// p.init(); -// p.run_all_threaded(); -// boost::this_thread::sleep_for(boost::chrono::seconds(100000)); -// p.stop(); -// p.term(); -// p.wait_for_all(); -// } - - -BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file From 1e63c5348b4c130cc601f341764af3cb17a536e4 Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Mon, 11 Sep 2023 18:30:40 +0530 Subject: [PATCH 03/19] Adding Pipeline.cpp changes --- base/src/PipeLine.cpp | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/base/src/PipeLine.cpp b/base/src/PipeLine.cpp index c51753802..b41f01626 100755 --- a/base/src/PipeLine.cpp +++ b/base/src/PipeLine.cpp @@ -32,6 +32,16 @@ bool PipeLine::appendModule(boost::shared_ptr pModule) return true; } +bool PipeLine::addControlModule(boost::shared_ptr cModule) +{ + for (int i = 0; i < modules.size(); i++) + { + modules[i]->controlModule = cModule; + cModule->pipelineModules.push_back(modules[i]); + } + return true; +} + bool PipeLine::checkCyclicDependency() { std::map< std::string, std::vector > dependencyMap; @@ -149,6 +159,11 @@ void PipeLine::run_all_threaded() m.myThread = boost::thread(ref(m)); Utils::setModuleThreadName(m.myThread, m.getId()); } + if ((modules[0]->controlModule) != nullptr) + { + Module& m = *(modules[0]->controlModule); + m.myThread = boost::thread(ref(m)); + } mPlay = true; } @@ -192,7 +207,7 @@ void PipeLine::step() // already playing return; } - + for (auto i = modules.begin(); i != modules.end(); i++) { if (i->get()->getNature() == Module::SOURCE) From a377deeda589e864c6ddb80d63872874a49fa4bb Mon Sep 17 00:00:00 2001 From: Venkat Date: Mon, 25 Sep 2023 12:05:57 +0530 Subject: [PATCH 04/19] sprint-3 changes --- base/include/Command.h | 53 
+++++++++++++++++++++- base/include/ImageViewerModule.h | 2 + base/include/PipeLine.h | 1 - base/src/H264DecoderV4L2Helper.cpp | 4 +- base/src/H264DecoderV4L2Helper.h | 1 + base/src/ImageViewerModule.cpp | 71 ++++++++++++++++++++++++------ base/src/MultimediaQueueXform.cpp | 22 ++++++++- base/src/RTSPClientSrc.cpp | 9 +++- 8 files changed, 144 insertions(+), 19 deletions(-) diff --git a/base/include/Command.h b/base/include/Command.h index ad5ea1439..543cd33dd 100755 --- a/base/include/Command.h +++ b/base/include/Command.h @@ -21,9 +21,11 @@ class Command NVRCommandExport, NVRCommandExportMMQ, NVRCommandView, + NVRGoLive, NVRCommandExportView, MP4WriterLastTS, - MMQtimestamps + MMQtimestamps, + Rendertimestamp }; Command() @@ -414,6 +416,27 @@ class NVRCommandView : public Command } }; +class NVRGoLive : public Command +{ +public: + NVRGoLive() : Command(Command::CommandType::NVRGoLive) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize(); + } + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + } +}; + class NVRCommandExportView : public Command { public: @@ -498,6 +521,34 @@ class MMQtimestamps : public Command } }; +class Rendertimestamp : public Command +{ +public: + Rendertimestamp() : Command(Command::CommandType::Rendertimestamp) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(currentTimeStamp) +sizeof(moduleId); + } + + uint64_t currentTimeStamp = 0; + std::string moduleId; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& currentTimeStamp; + ar& moduleId; + } +}; + + + class PlayPauseCommand : public Command { public: diff --git a/base/include/ImageViewerModule.h b/base/include/ImageViewerModule.h index 
4d0f07179..d8cf1687b 100755 --- a/base/include/ImageViewerModule.h +++ b/base/include/ImageViewerModule.h @@ -54,6 +54,7 @@ class ImageViewerModule : public Module bool term(); bool closeWindow(); bool createWindow(int width, int height); + bool showRender = true; protected: bool process(frame_container &frames); @@ -64,4 +65,5 @@ class ImageViewerModule : public Module bool handleCommand(Command::CommandType type, frame_sp &frame); boost::shared_ptr mDetail; ImageViewerModuleProps mProps; + uint64_t lastRenderTimestamp = 0; }; \ No newline at end of file diff --git a/base/include/PipeLine.h b/base/include/PipeLine.h index 142ffe512..b47b0d035 100755 --- a/base/include/PipeLine.h +++ b/base/include/PipeLine.h @@ -1,7 +1,6 @@ #pragma once #include #include -// #include "NVRControlModule.h" #include "AbsControlModule.h" #include "enum_macros.h" #include diff --git a/base/src/H264DecoderV4L2Helper.cpp b/base/src/H264DecoderV4L2Helper.cpp index af9c65335..90db3ac56 100644 --- a/base/src/H264DecoderV4L2Helper.cpp +++ b/base/src/H264DecoderV4L2Helper.cpp @@ -315,7 +315,8 @@ void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer { return -1; } - + outputFrame->timestamp = incomingTimeStamp.front(); + incomingTimeStamp.pop(); send(outputFrame); return 0; @@ -1306,6 +1307,7 @@ bool h264DecoderV4L2Helper::init(std::function _send, std::func int h264DecoderV4L2Helper::process(frame_sp inputFrame) { uint32_t idx = 0; + incomingTimeStamp.push(inputFrame->timestamp); while (!ctx.eos && !ctx.in_error && idx < ctx.op_num_buffers) { struct v4l2_buffer queue_v4l2_buf_op; diff --git a/base/src/H264DecoderV4L2Helper.h b/base/src/H264DecoderV4L2Helper.h index eb8e9193e..b7abd67ab 100644 --- a/base/src/H264DecoderV4L2Helper.h +++ b/base/src/H264DecoderV4L2Helper.h @@ -392,4 +392,5 @@ class h264DecoderV4L2Helper std::function makeFrame; std::function send; int ret = 0; + std::queue incomingTimeStamp; }; diff --git a/base/src/ImageViewerModule.cpp 
b/base/src/ImageViewerModule.cpp index a5d433f62..b65fe100a 100755 --- a/base/src/ImageViewerModule.cpp +++ b/base/src/ImageViewerModule.cpp @@ -31,16 +31,16 @@ class DetailRenderer virtual bool view() = 0; - bool eglInitializer(uint32_t _height, uint32_t _width) + bool eglInitializer(uint32_t _height, uint32_t _width , uint32_t _x_offset , uint32_t _y_offset) { #if defined(__arm__) || defined(__aarch64__) uint32_t displayHeight, displayWidth; NvEglRenderer::getDisplayResolution(displayWidth, displayHeight); if (props.height != 0 && props.width != 0) { - props.x_offset += (displayWidth - props.width) / 2; - props.y_offset += (displayHeight - props.height) / 2; - renderer = NvEglRenderer::createEglRenderer(__TIMESTAMP__, props.width, props.height, props.x_offset, props.y_offset, props.displayOnTop); + _x_offset += (displayWidth - props.width) / 2; + _y_offset += (displayHeight - props.height) / 2; + renderer = NvEglRenderer::createEglRenderer(__TIMESTAMP__, props.width, props.height, _x_offset, _y_offset, props.displayOnTop); } else { @@ -94,6 +94,8 @@ class DetailRenderer public: frame_sp inputFrame; ImageViewerModuleProps props; + uint32_t x_offset = 0; + uint32_t y_offset = 0; protected: cv::Mat mImg; @@ -134,11 +136,11 @@ class DetailImageviewer : public DetailRenderer bool ImageViewerModule::validateInputPins() { - if (getNumberOfInputPins() != 1) - { - LOG_ERROR << "<" << getId() << ">::validateInputPins size is expected to be 1. Actual<" << getNumberOfInputPins() << ">"; - return false; - } + // if (getNumberOfInputPins() != 1) + // { + // LOG_ERROR << "<" << getId() << ">::validateInputPins size is expected to be 1. 
Actual<" << getNumberOfInputPins() << ">"; + // return false; + // } framemetadata_sp metadata = getFirstInputMetadata(); FrameMetadata::FrameType frameType = metadata->getFrameType(); FrameMetadata::MemType inputMemType = metadata->getMemType(); @@ -189,11 +191,28 @@ bool ImageViewerModule::term() { return Module::term(); } bool ImageViewerModule::process(frame_container &frames) { mDetail->inputFrame = frames.cbegin()->second; + auto TimeStamp = mDetail->inputFrame->timestamp; + if (isFrameEmpty(mDetail->inputFrame)) { return true; } - mDetail->view(); + auto newTime = mDetail->inputFrame->timestamp; + if((showRender) && (newTime > lastRenderTimestamp)) + { + mDetail->view(); + lastRenderTimestamp = mDetail->inputFrame->timestamp; + } + auto myId = Module::getId(); + if ((controlModule != nullptr) && (myId == "ImageViewerModule_3")) + { + Rendertimestamp cmd; + auto myTime = frames.cbegin()->second->timestamp; + cmd.currentTimeStamp = myTime; ++ controlModule->queueCommand(cmd); + return true; + } + return true; } @@ -225,7 +244,7 @@ bool ImageViewerModule::processSOS(frame_sp &frame) throw AIPException(AIP_FATAL, "Unsupported FrameType<" + std::to_string(frameType) + ">"); } - mDetail->eglInitializer(height, width); + mDetail->eglInitializer(height, width , mProps.x_offset , mProps.y_offset); #else mDetail->setMatImg(FrameMetadataFactory::downcast(inputMetadata)); #endif @@ -240,6 +259,13 @@ bool ImageViewerModule::shouldTriggerSOS() bool ImageViewerModule::handleCommand(Command::CommandType type, frame_sp &frame) { #if defined(__arm__) || defined(__aarch64__) + if (type == Command::CommandType::NVRGoLive) + { + NVRGoLive cmd; + getCommand(cmd, frame); + mDetail->destroyWindow(); + return true; + } if (type == Command::CommandType::DeleteWindow) { mDetail->destroyWindow(); @@ -249,9 +275,26 @@ bool ImageViewerModule::handleCommand(Command::CommandType type, frame_sp &frame { EglRendererCreateWindow cmd; getCommand(cmd, frame); - 
mDetail->eglInitializer(cmd.width, cmd.height); + mDetail->eglInitializer(cmd.height, cmd.width , mProps.x_offset , mProps.y_offset); return true; } + + else if (type == Command::CommandType::NVRCommandView) + { + NVRCommandView cmd; + getCommand(cmd, frame); + if(cmd.doView) + { + showRender = true; + return true; + } + else + { + showRender = false; + return true; + } + return true; + } return Module::handleCommand(type, frame); #else return true; @@ -272,8 +315,8 @@ bool ImageViewerModule::createWindow(int width, int height) { #if defined(__arm__) || defined(__aarch64__) EglRendererCreateWindow cmd; - cmd.width = width; - cmd.height = height; + cmd.width = 720; + cmd.height = 480; return queueCommand(cmd); #else return true; diff --git a/base/src/MultimediaQueueXform.cpp b/base/src/MultimediaQueueXform.cpp index e98307bd2..f216fe6fd 100644 --- a/base/src/MultimediaQueueXform.cpp +++ b/base/src/MultimediaQueueXform.cpp @@ -125,7 +125,7 @@ class GroupedFramesQueue : public FramesQueue auto ret = H264Utils::parseNalu(mFrameBuffer); tie(typeFound, spsBuff, ppsBuff) = ret; - BOOST_LOG_TRIVIAL(info) << "I-FRAME" << typeFound; + //BOOST_LOG_TRIVIAL(info) << "I-FRAME" << typeFound; if (spsBuff.size() != 0) { @@ -758,6 +758,26 @@ bool MultimediaQueueXform::process(frame_container& frames) queryEndTime = 0; setState(queryStartTime, queryEndTime); } + //This part is done only when Control module is connected + if (controlModule != nullptr) + { + //Send commmand to NVRControl module + if (mState->queueObject->mQueue.size() != 0) + { + MMQtimestamps cmd; + auto front = mState->queueObject->mQueue.begin(); + if (front != mState->queueObject->mQueue.end()) + { + uint64_t firstTimeStamp = front->first; + cmd.firstTimeStamp = firstTimeStamp; + } + auto back = mState->queueObject->mQueue.crbegin(); + uint64_t lastTimeStamp = back->first; + cmd.lastTimeStamp = lastTimeStamp; + controlModule->queueCommand(cmd); + } + return true; + } return true; } diff --git 
a/base/src/RTSPClientSrc.cpp b/base/src/RTSPClientSrc.cpp index 4d001f224..aabbff7b0 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -145,7 +145,14 @@ class RTSPClientSrc::Detail } } if(outFrames.size()>0) - myModule->send(outFrames); + { + std::chrono::time_point t = std::chrono::system_clock::now(); + auto dur = std::chrono::duration_cast(t.time_since_epoch()); + outFrames.begin()->second->timestamp = dur.count(); + auto timeStamp = dur.count(); + auto sizeIs = outFrames.size(); + //LOG_ERROR << "RTSP Time is "<< outFrames.begin()->second->timestamp; + myModule->send(outFrames); return true; } From 4d7e31f6ca6d7077c8c8d657b76843ba170706d1 Mon Sep 17 00:00:00 2001 From: venkat0907 Date: Mon, 25 Sep 2023 15:36:15 +0530 Subject: [PATCH 05/19] typo change in rtsp --- base/src/RTSPClientSrc.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/base/src/RTSPClientSrc.cpp b/base/src/RTSPClientSrc.cpp index aabbff7b0..7bb4f399f 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -154,6 +154,7 @@ class RTSPClientSrc::Detail //LOG_ERROR << "RTSP Time is "<< outFrames.begin()->second->timestamp; myModule->send(outFrames); return true; + } } bool isConncected() const { return bConnected; } From 70bf9b9156034fb4656c946be0f8e194f408579a Mon Sep 17 00:00:00 2001 From: Venkat Date: Fri, 29 Sep 2023 17:17:27 +0530 Subject: [PATCH 06/19] Play/pause and golive working checkpoint --- base/include/BoundBuffer.h | 28 +++++++ base/include/FrameContainerQueue.h | 2 + base/include/Module.h | 13 +++- base/include/MultimediaQueueXform.h | 9 ++- base/src/FrameContainerQueue.cpp | 10 +++ base/src/ImageViewerModule.cpp | 11 ++- base/src/Module.cpp | 13 +++- base/src/Mp4ReaderSource.cpp | 10 ++- base/src/MultimediaQueueXform.cpp | 111 ++++++++++++++++++++++++---- base/src/RTSPClientSrc.cpp | 43 +++++++---- 10 files changed, 214 insertions(+), 36 deletions(-) diff --git a/base/include/BoundBuffer.h b/base/include/BoundBuffer.h index 
518eb5268..d993c35e0 100755 --- a/base/include/BoundBuffer.h +++ b/base/include/BoundBuffer.h @@ -38,6 +38,25 @@ class bounded_buffer } } + void push_back(typename boost::call_traits::param_type item) + { // `param_type` represents the "best" way to pass a parameter of type `value_type` to a method. + + boost::mutex::scoped_lock lock(m_mutex); + //m_not_full.wait(lock, boost::bind(&bounded_buffer::is_ready_to_accept, this)); + if (is_not_full() && m_accept) + { + m_container.push_back(item); + ++m_unread; + lock.unlock(); + m_not_empty.notify_one(); + } + else + { + // check and remove if explicit unlock is required + lock.unlock(); + } + } + void push_drop_oldest(typename boost::call_traits::param_type item) { boost::mutex::scoped_lock lock(m_mutex); @@ -93,6 +112,15 @@ class bounded_buffer return ret; } + value_type peek() { + boost::mutex::scoped_lock lock(m_mutex); + if (is_not_empty()) + { + value_type ret = m_container.back(); + return ret; + } + } + value_type try_pop() { boost::mutex::scoped_lock lock(m_mutex); if (is_not_empty()) diff --git a/base/include/FrameContainerQueue.h b/base/include/FrameContainerQueue.h index e72d2290e..48d60cc31 100755 --- a/base/include/FrameContainerQueue.h +++ b/base/include/FrameContainerQueue.h @@ -8,11 +8,13 @@ class FrameContainerQueue :public bounded_buffer { public: FrameContainerQueue(size_t capacity); virtual void push(frame_container item); + virtual void push_back(frame_container item); virtual void push_drop_oldest(frame_container item); virtual frame_container pop(); virtual bool try_push(frame_container item); virtual frame_container try_pop(); + virtual frame_container peek(); virtual bool isFull(); virtual void clear(); diff --git a/base/include/Module.h b/base/include/Module.h index 4c0fbcdeb..2fb5f585c 100644 --- a/base/include/Module.h +++ b/base/include/Module.h @@ -180,7 +180,7 @@ class Module { bool getPlayDirection() { return mDirection; } virtual void flushQueRecursive(); template - bool queueCommand(T& 
cmd) + bool queueCommand(T& cmd, bool priority = false) { auto size = cmd.getSerializeSize(); auto frame = makeCommandFrame(size, mCommandMetadata); @@ -190,8 +190,14 @@ class Module { // add to que frame_container frames; frames.insert(make_pair("command", frame)); - Module::push(frames); - + if(priority) + { + Module::push_back(frames); + } + else + { + Module::push(frames); + } return true; } protected: @@ -344,6 +350,7 @@ class Module { void setSieveDisabledFlag(bool sieve); frame_sp makeFrame(size_t size, framefactory_sp& framefactory); bool push(frame_container frameContainer); //exchanges the buffer + bool push_back(frame_container frameContainer); bool try_push(frame_container frameContainer); //tries to exchange the buffer bool addEoPFrame(frame_container& frames); diff --git a/base/include/MultimediaQueueXform.h b/base/include/MultimediaQueueXform.h index 1909d6778..9ab370ed6 100644 --- a/base/include/MultimediaQueueXform.h +++ b/base/include/MultimediaQueueXform.h @@ -44,7 +44,8 @@ class MultimediaQueueXform : public Module { bool handlePropsChange(frame_sp& frame); boost::shared_ptr mState; MultimediaQueueXformProps mProps; - + boost::shared_ptr getQue(); + void extractFramesAndEnqueue(boost::shared_ptr& FrameQueue); protected: bool process(frame_container& frames); bool validateInputPins(); @@ -61,4 +62,10 @@ class MultimediaQueueXform : public Module { uint64_t queryStartTime = 0; uint64_t queryEndTime = 0; FrameMetadata::FrameType mFrameType; + using sys_clock = std::chrono::system_clock; + sys_clock::time_point frame_begin; + std::chrono::nanoseconds myTargetFrameLen; + std::chrono::nanoseconds myNextWait; + uint64_t latestFrameExportedFromHandleCmd = 0; + bool initDone = false; }; diff --git a/base/src/FrameContainerQueue.cpp b/base/src/FrameContainerQueue.cpp index f6153f43b..cf6db1d74 100755 --- a/base/src/FrameContainerQueue.cpp +++ b/base/src/FrameContainerQueue.cpp @@ -9,6 +9,11 @@ void FrameContainerQueue::push(frame_container item) 
bounded_buffer::push(item); } +void FrameContainerQueue::push_back(frame_container item) +{ + bounded_buffer::push_back(item); +} + void FrameContainerQueue::push_drop_oldest(frame_container item) { bounded_buffer::push_drop_oldest(item); @@ -29,6 +34,11 @@ frame_container FrameContainerQueue::try_pop() return bounded_buffer::try_pop(); } +frame_container FrameContainerQueue::peek() +{ + return bounded_buffer::peek(); +} + bool FrameContainerQueue::isFull() { return bounded_buffer::isFull(); diff --git a/base/src/ImageViewerModule.cpp b/base/src/ImageViewerModule.cpp index b65fe100a..bfa361d0e 100755 --- a/base/src/ImageViewerModule.cpp +++ b/base/src/ImageViewerModule.cpp @@ -190,6 +190,11 @@ bool ImageViewerModule::term() { return Module::term(); } bool ImageViewerModule::process(frame_container &frames) { + auto myId = Module::getId(); + if(myId == "ImageViewerModule_3") + { + // LOG_ERROR<<"Check Me"; + } mDetail->inputFrame = frames.cbegin()->second; auto TimeStamp = mDetail->inputFrame->timestamp; @@ -198,12 +203,12 @@ bool ImageViewerModule::process(frame_container &frames) return true; } auto newTime = mDetail->inputFrame->timestamp; - if((showRender) && (newTime > lastRenderTimestamp)) + if((showRender))// && (newTime > lastRenderTimestamp)) { mDetail->view(); - lastRenderTimestamp = mDetail->inputFrame->timestamp; + //lastRenderTimestamp = mDetail->inputFrame->timestamp; } - auto myId = Module::getId(); + if ((controlModule != nullptr) && (myId == "ImageViewerModule_3")) { Rendertimestamp cmd; diff --git a/base/src/Module.cpp b/base/src/Module.cpp index 7c00ab596..86a5b9b0b 100644 --- a/base/src/Module.cpp +++ b/base/src/Module.cpp @@ -561,6 +561,12 @@ bool Module::push(frame_container frameContainer) return true; } +bool Module::push_back(frame_container frameContainer) +{ + mQue->push_back(frameContainer); + return true; +} + bool Module::try_push(frame_container frameContainer) { auto rc = mQue->try_push(frameContainer); @@ -600,6 +606,7 @@ bool 
Module::isNextModuleQueFull() { if (it->second->mQue->isFull()) { + auto modID = it->second->myId; ret = true; break; } @@ -720,10 +727,12 @@ bool Module::send(frame_container &frames, bool forceBlockingPush) // next module push if (!forceBlockingPush) { + //LOG_ERROR << "forceBlocking Push myID" << myId << "sending to <" << nextModuleId; mQuePushStrategy->push(nextModuleId, requiredPins); } else { + //LOG_ERROR << "normal push myID" << myId << "sending to <" << nextModuleId; mModules[nextModuleId]->push(requiredPins); } } @@ -1083,7 +1092,7 @@ bool Module::relay(boost::shared_ptr next, bool open) } auto cmd = RelayCommand(nextModuleId, open); - return queueCommand(cmd); + return queueCommand(cmd, true); } void Module::flushQueRecursive() @@ -1189,6 +1198,8 @@ bool Module::step() else { mProfiler->startPipelineLap(); + + //LOG_ERROR << "Module Id is " << Module::getId() << "Module FPS is " << Module::getPipelineFps() << mProps->fps; auto frames = mQue->pop(); preProcessNonSource(frames); diff --git a/base/src/Mp4ReaderSource.cpp b/base/src/Mp4ReaderSource.cpp index b267b7fb3..efab93ad9 100644 --- a/base/src/Mp4ReaderSource.cpp +++ b/base/src/Mp4ReaderSource.cpp @@ -1235,7 +1235,7 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) { boost::asio::mutable_buffer tmpBuffer(imgFrame->data(), imgFrame->size()); auto type = H264Utils::getNALUType((char*)tmpBuffer.data()); - if (type != H264Utils::H264_NAL_TYPE_END_OF_SEQ) + if (type == H264Utils::H264_NAL_TYPE_IDR_SLICE) { auto tempFrame = makeFrame(imgSize + spsSize + ppsSize + 8, h264ImagePinId); uint8_t* tempFrameBuffer = reinterpret_cast(tempFrame->data()); @@ -1244,8 +1244,12 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) memcpy(tempFrameBuffer, imgFrame->data(), imgSize); imgSize += spsSize + ppsSize + 8; imgFrame = tempFrame; + mState.shouldPrependSpsPps = false; + } + else if(type == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + mState.shouldPrependSpsPps = false; } - 
mState.shouldPrependSpsPps = false; } auto trimmedImgFrame = makeFrameTrim(imgFrame, imgSize, h264ImagePinId); @@ -1256,6 +1260,8 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) { frameData[3] = 0x1; frameData[spsSize + 7] = 0x1; + frameData[spsSize + ppsSize + 8] = 0x0; + frameData[spsSize + ppsSize + 9] = 0x0; frameData[spsSize + ppsSize + 10] = 0x0; frameData[spsSize + ppsSize + 11] = 0x1; } diff --git a/base/src/MultimediaQueueXform.cpp b/base/src/MultimediaQueueXform.cpp index f216fe6fd..7ece1f372 100644 --- a/base/src/MultimediaQueueXform.cpp +++ b/base/src/MultimediaQueueXform.cpp @@ -8,7 +8,7 @@ #include "H264Utils.h" #include "EncodedImageMetadata.h" #include "H264Metadata.h" - +#include "FrameContainerQueue.h" class FramesQueue { public: @@ -46,7 +46,7 @@ class IndependentFramesQueue : public FramesQueue largestTimeStamp = it->second->timestamp; } } - + //BOOST_LOG_TRIVIAL(info) << "queue size = " << mQueue.size(); if (isMapDelayInTime) // If the lower and upper watermark are given in time { if ((largestTimeStamp - mQueue.begin()->first > lowerWaterMark) && (pushToNextModule)) @@ -540,7 +540,7 @@ void MultimediaQueueXform::addInputPin(framemetadata_sp& metadata, string& pinId { Module::addInputPin(metadata, pinId); mOutputPinId = pinId; - addOutputPin(metadata, pinId); + //addOutputPin(metadata, pinId); } bool MultimediaQueueXform::init() @@ -555,8 +555,7 @@ bool MultimediaQueueXform::init() { auto& metadata = element.second; mFrameType = metadata->getFrameType(); - - if ((mFrameType == FrameMetadata::FrameType::ENCODED_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE)) + if ((mFrameType == FrameMetadata::FrameType::ENCODED_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE_PLANAR)) { mState->queueObject.reset(new IndependentFramesQueue(mProps.lowerWaterMark, mProps.upperWaterMark, mProps.isMapDelayInTime)); } @@ -567,7 +566,7 @@ bool 
MultimediaQueueXform::init() } } mState.reset(new Idle(mState->queueObject)); - + myTargetFrameLen = std::chrono::nanoseconds(1000000000 / 22); return true; } @@ -606,7 +605,7 @@ void MultimediaQueueXform::setState(uint64_t tStart, uint64_t tEnd) else { - if ((mFrameType == FrameMetadata::FrameType::ENCODED_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE)) + if ((mFrameType == FrameMetadata::FrameType::ENCODED_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE) || (mFrameType == FrameMetadata::FrameType::RAW_IMAGE_PLANAR)) { mState.reset(new ExportJpeg(mState->queueObject, [&](frame_container& frames, bool forceBlockingPush = false) @@ -627,8 +626,44 @@ void MultimediaQueueXform::setState(uint64_t tStart, uint64_t tEnd) } +void MultimediaQueueXform::extractFramesAndEnqueue(boost::shared_ptr& frameQueue) +{ + //loop over frame container + auto frames = frameQueue->pop(); + for (auto itr = frames.begin(); itr != frames.end(); itr++) + { + if (itr->second->isCommand()) + { + auto cmdType = NoneCommand::getCommandType(itr->second->data(), itr->second->size()); + if(cmdType == Command::CommandType::Relay || cmdType == Command::CommandType::MultimediaQueueXform) + { + handleCommand(cmdType, itr->second); + } + else + { + frame_container commandFrame; + commandFrame.insert(make_pair(itr->first, itr->second)); + frameQueue->push_back(commandFrame); + } + } + else + { + frame_container framesContainer; + framesContainer.insert(make_pair(itr->first, itr->second)); + mState->queueObject->enqueue(framesContainer, pushToNextModule); + } + } +} + +boost::shared_ptr MultimediaQueueXform::getQue() +{ + return Module::getQue(); +} + bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& frame) { + myTargetFrameLen = std::chrono::nanoseconds(1000000000 / 22); + initDone = false; if (type == Command::CommandType::MultimediaQueueXform) { MultimediaQueueXformCommand cmd; @@ -654,21 +689,45 @@ bool 
MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr pushToNextModule = false; queryStartTime = it->first; queryStartTime--; - BOOST_LOG_TRIVIAL(info) << "The Queue of Next Module is full, waiting for queue to be free"; + //BOOST_LOG_TRIVIAL(info) << "The Queue of Next Module is full, waiting for queue to be free"; return true; } else { - mState->exportSend(it->second); + auto moduleQueue = getQue(); + if(moduleQueue->size()) + { + extractFramesAndEnqueue(moduleQueue); + } + if (!initDone) + { + myNextWait = myTargetFrameLen; + frame_begin = sys_clock::now(); + initDone = true; + } + + //LOG_ERROR << "multimediaQueueSize = " << queueSize; + frame_container outFrames; + auto outputId = Module::getOutputPinIdByType(FrameMetadata::RAW_IMAGE_PLANAR); + outFrames.insert(make_pair(outputId, it->second.begin()->second)); + //LOG_ERROR<<"sENDING FROM HANDLE COMMAND AT TIME "<< it->first; + mState->exportSend(outFrames); + latestFrameExportedFromHandleCmd = it->first; + std::chrono::nanoseconds frame_len = sys_clock::now() - frame_begin; + if (myNextWait > frame_len) + { + std::this_thread::sleep_for(myNextWait - frame_len); + } + myNextWait += myTargetFrameLen; } } } } - if (mState->Type == mState->EXPORT) { uint64_t tOld = 0, tNew = 0; - getQueueBoundaryTS(tOld, tNew); + //getQueueBoundaryTS(tOld, tNew); + tNew = latestFrameExportedFromHandleCmd; if (endTimeSaved > tNew) { @@ -686,6 +745,7 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr } return true; } + LOG_ERROR <<"RELAY COMMAND WAS HERE"; return Module::handleCommand(type, frame); } @@ -704,6 +764,7 @@ bool MultimediaQueueXform::allowFrames(uint64_t& ts, uint64_t& te) bool MultimediaQueueXform::process(frame_container& frames) { mState->queueObject->enqueue(frames, pushToNextModule); + //LOG_ERROR << frames.begin()->second->timestamp; if (mState->Type == State::EXPORT) { uint64_t tOld, tNew = 0; @@ -729,12 +790,36 @@ bool 
MultimediaQueueXform::process(frame_container& frames) pushToNextModule = false; queryStartTime = it->first; queryStartTime--; - BOOST_LOG_TRIVIAL(info) << "The Queue of Next Module is full, waiting for some space to be free"; + //BOOST_LOG_TRIVIAL(info) << "The Queue of Next Module is full, waiting for some space to be free"; return true; } else { - mState->exportSend(it->second); + auto moduleQueue = getQue(); + if(moduleQueue->size()) + { + extractFramesAndEnqueue(moduleQueue); + } + if (!initDone) + { + myNextWait = myTargetFrameLen; + frame_begin = sys_clock::now(); + initDone = true; + } + + //LOG_ERROR << "multimediaQueueSize = " << queueSize; + frame_container outFrames; + auto outputId = Module::getOutputPinIdByType(FrameMetadata::RAW_IMAGE_PLANAR); + outFrames.insert(make_pair(outputId, it->second.begin()->second)); + //LOG_ERROR<<"sENDING FROM PROCESS AT TIME "<< it->first; + mState->exportSend(outFrames); + std::chrono::nanoseconds frame_len = sys_clock::now() - frame_begin; + if (myNextWait > frame_len) + { + //LOG_ERROR << "is it sleeping in process"; + std::this_thread::sleep_for(myNextWait - frame_len); + } + myNextWait += myTargetFrameLen; } } diff --git a/base/src/RTSPClientSrc.cpp b/base/src/RTSPClientSrc.cpp index 7bb4f399f..2902cc1f0 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -12,6 +12,7 @@ using namespace std; #include #include #include +#include "H264Utils.h" extern "C" { @@ -131,11 +132,35 @@ class RTSPClientSrc::Detail } auto it = streamsMap.find(packet.stream_index); if (it != streamsMap.end()) { // so we have an interest in sending this - auto frm=myModule->makeFrame(packet.size, it->second); + frame_sp frm; + auto naluType = H264Utils::getNALUType((const char*)packet.data); + if (naluType == H264Utils::H264_NAL_TYPE_SEI) + { + size_t offset = 0; + packet.data += 4; + packet.size -= 4; + H264Utils::getNALUnit((const char*)packet.data, packet.size, offset); + packet.data += offset - 4; + packet.size -= 
offset - 4; + auto spsPpsData = pFormatCtx->streams[0]->codec->extradata; + auto spsPpsSize = pFormatCtx->streams[0]->codec->extradata_size;; + size_t totalFrameSize = packet.size + spsPpsSize; + + frm = myModule->makeFrame(totalFrameSize, it->second); + uint8_t* frameData = static_cast(frm->data()); + memcpy(frameData, spsPpsData, spsPpsSize); + frameData += spsPpsSize; + memcpy(frameData, packet.data, packet.size); + } + else + { + frm = myModule->makeFrame(packet.size, it->second); + memcpy(frm->data(), packet.data, packet.size); + } - //dreaded memory copy should be avoided - memcpy(frm->data(), packet.data, packet.size); - frm->timestamp = packet.pts; + std::chrono::time_point t = std::chrono::system_clock::now(); + auto dur = std::chrono::duration_cast(t.time_since_epoch()); + frm->timestamp = dur.count(); if (!outFrames.insert(make_pair(it->second, frm)).second) { LOG_WARNING << "oops! there is already another packet for pin " << it->second; @@ -145,16 +170,8 @@ class RTSPClientSrc::Detail } } if(outFrames.size()>0) - { - std::chrono::time_point t = std::chrono::system_clock::now(); - auto dur = std::chrono::duration_cast(t.time_since_epoch()); - outFrames.begin()->second->timestamp = dur.count(); - auto timeStamp = dur.count(); - auto sizeIs = outFrames.size(); - //LOG_ERROR << "RTSP Time is "<< outFrames.begin()->second->timestamp; - myModule->send(outFrames); + myModule->send(outFrames); return true; - } } bool isConncected() const { return bConnected; } From a4949a3f2dac0f3bea1fa2fe66555f71b27c1641 Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Mon, 9 Oct 2023 18:45:39 +0530 Subject: [PATCH 07/19] Sprint-4 changes for ApraPipes --- base/include/H264EncoderV4L2Helper.h | 2 + base/include/NVRControlModule.h | 66 ++++++++ base/src/H264DecoderV4L2Helper.cpp | 16 +- base/src/H264EncoderV4L2Helper.cpp | 3 + base/src/Mp4WriterSink.cpp | 127 +++++++------- base/src/NVRControlModule.cpp | 240 +++++++++++++++++++++++++++ base/src/NvEglRenderer.cpp | 24 ++- 
base/src/RTSPClientSrc.cpp | 9 +- base/vcpkg.json | 2 +- vcpkg | 2 +- 10 files changed, 416 insertions(+), 75 deletions(-) create mode 100644 base/include/NVRControlModule.h create mode 100644 base/src/NVRControlModule.cpp diff --git a/base/include/H264EncoderV4L2Helper.h b/base/include/H264EncoderV4L2Helper.h index 43e4fdd10..0e38c2a4b 100644 --- a/base/include/H264EncoderV4L2Helper.h +++ b/base/include/H264EncoderV4L2Helper.h @@ -60,4 +60,6 @@ class H264EncoderV4L2Helper framemetadata_sp h264Metadata; std::function makeFrame; std::unique_ptr mConverter; +protected: + std::queue incomingTimeStamp; }; \ No newline at end of file diff --git a/base/include/NVRControlModule.h b/base/include/NVRControlModule.h new file mode 100644 index 000000000..c9dfe46e6 --- /dev/null +++ b/base/include/NVRControlModule.h @@ -0,0 +1,66 @@ +#pragma once +#include "Module.h" +#include "AbsControlModule.h" + +class NVRControlModuleProps : public AbsControlModuleProps +{ +public: + NVRControlModuleProps() + { + } + size_t getSerializeSize() + { + return ModuleProps::getSerializeSize(); + } +private: + friend class boost::serialization::access; + + template + void serialize(Archive& ar, const unsigned int version) + { + ar& boost::serialization::base_object(*this); + } +}; + +class NVRControlModule : public AbsControlModule +{ + public: + NVRControlModule(NVRControlModuleProps _props); + ~NVRControlModule(); + bool init(); + bool term(); + void setProps(NVRControlModuleProps& props); + NVRControlModuleProps getProps(); + bool validateModuleRoles(); + bool nvrRecord(bool record); + bool nvrExport(uint64_t startTime, uint64_t stopTime); + bool nvrExportView(uint64_t startTime, uint64_t stopTime); + bool nvrView(bool view); + bool nvrGoLive(); + bool isRendererPaused = false; + uint64_t pausedTS = 0; + uint64_t mp4lastWrittenTS = 0; + uint64_t firstMMQtimestamp = 0; + uint64_t lastMMQtimestamp = 0; + uint64_t givenStart = 0; + uint64_t givenStop = 0; + uint64_t mp4_2_lastWrittenTS = 0; + 
bool isExporting = false; + bool isRenderWindowOpen = true; + bool isStateLive = true; + uint64_t currentRenderTS = 0; + bool isSavingVideo = false; + bool isExpWriterInitialized = true; + +protected: + bool validateInputPins(); + bool validateOutputPins(); + bool validateInputOutputPins(); + bool handleCommand(Command::CommandType type, frame_sp& frame); + bool handlePropsChange(frame_sp& frame); + +private: + void setMetadata(framemetadata_sp& metadata); + class Detail; + boost::shared_ptr mDetail; +}; \ No newline at end of file diff --git a/base/src/H264DecoderV4L2Helper.cpp b/base/src/H264DecoderV4L2Helper.cpp index 90db3ac56..47cbb74b9 100644 --- a/base/src/H264DecoderV4L2Helper.cpp +++ b/base/src/H264DecoderV4L2Helper.cpp @@ -303,7 +303,7 @@ void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer * memory-mapped virtual address of the plane with the access * pointed by the flag into the void data-pointer. * Before the mapped memory is accessed, a call to NvBufferMemSyncForCpu() - * with the virtual address returned must be present before any access is made +* with the virtual address returned must be present before any access is made * by the CPU to the buffer. * * After reading the data, the memory-mapped virtual address of the @@ -371,7 +371,7 @@ void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer return ret_val; } - void h264DecoderV4L2Helper::query_set_capture(context_t * ctx ,int &f_d) + void h264DecoderV4L2Helper::query_set_capture(context_t * ctx) { struct v4l2_format format; struct v4l2_crop crop; @@ -638,13 +638,11 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) ** Format and buffers are now set on capture. 
*/ - auto outputFrame = m_nThread->makeFrame(); - auto dmaOutFrame = static_cast(outputFrame->data()); - int f_d = dmaOutFrame->getFd(); + if (!ctx->in_error) { - m_nThread->query_set_capture(ctx, f_d); + m_nThread->query_set_capture(ctx); } /* Check for resolution event to again @@ -659,7 +657,7 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) switch (event.type) { case V4L2_EVENT_RESOLUTION_CHANGE: - m_nThread->query_set_capture(ctx, f_d); + m_nThread->query_set_capture(ctx); continue; } } @@ -729,7 +727,9 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) /* Blocklinear to Pitch transformation is required ** to dump the raw decoded buffer data. */ - + auto outputFrame = m_nThread->makeFrame(); + auto dmaOutFrame = static_cast(outputFrame->data()); + int f_d = dmaOutFrame->getFd(); ret_val = NvBufferTransform(decoded_buffer->planes[0].fd,f_d, &transform_params); if (ret_val == -1) { diff --git a/base/src/H264EncoderV4L2Helper.cpp b/base/src/H264EncoderV4L2Helper.cpp index 2103ca238..9bb7eca1d 100644 --- a/base/src/H264EncoderV4L2Helper.cpp +++ b/base/src/H264EncoderV4L2Helper.cpp @@ -314,6 +314,8 @@ void H264EncoderV4L2Helper::capturePlaneDQCallback(AV4L2Buffer *buffer) auto frame = frame_sp(frame_opool.construct(buffer->planesInfo[0].data, buffer->v4l2_buf.m.planes[0].bytesused), std::bind(&H264EncoderV4L2Helper::reuseCatureBuffer, this, std::placeholders::_1, buffer->getIndex(), mSelf)); frame->setMetadata(h264Metadata); frame_container frames; + frame->timestamp = incomingTimeStamp.front(); + incomingTimeStamp.pop(); frames.insert(make_pair(h264FrameOutputPinId, frame)); if (enableMotionVectors) @@ -336,6 +338,7 @@ void H264EncoderV4L2Helper::reuseCatureBuffer(ExtFrame *pointer, uint32_t index, bool H264EncoderV4L2Helper::process(frame_sp& frame) { + incomingTimeStamp.push(frame->timestamp); auto buffer = mOutputPlane->getFreeBuffer(); if (!buffer) { diff --git a/base/src/Mp4WriterSink.cpp b/base/src/Mp4WriterSink.cpp index 
335425277..ab2aadd48 100644 --- a/base/src/Mp4WriterSink.cpp +++ b/base/src/Mp4WriterSink.cpp @@ -238,6 +238,7 @@ class DetailAbs struct mp4_mux_track_params params, metatrack_params; struct mp4_video_decoder_config vdc; struct mp4_mux_sample mux_sample; + struct mp4_mux_prepend_buffer prepend_buffer; struct mp4_track_sample sample; int mHeight; @@ -274,13 +275,12 @@ class DetailH264 : public DetailAbs const_buffer ppsBuffer; const_buffer spsBuff; const_buffer ppsBuff; - short typeFound; DetailH264(Mp4WriterSinkProps& _props) : DetailAbs(_props) { } bool write(frame_container& frames); - uint8_t* AppendSizeInNaluSeprator(short naluType, frame_sp frame, size_t& frameSize); + void modifyFrameOnNewSPSPPS(short naluType, frame_sp frame, uint8_t*& spsPpsdata, size_t& spsPpsSize, uint8_t*& frameData, size_t& frameSize); bool set_video_decoder_config() { @@ -368,11 +368,11 @@ bool DetailJpeg::write(frame_container& frames) return true; } -uint8_t* DetailH264::AppendSizeInNaluSeprator(short naluType, frame_sp inH264ImageFrame, size_t& frameSize) +void DetailH264::modifyFrameOnNewSPSPPS(short naluType, frame_sp inH264ImageFrame, uint8_t*& spsPpsBuffer, size_t& spsPpsSize, uint8_t*& frameData, size_t& frameSize) { char NaluSeprator[3] = { 00 ,00, 00 }; auto nalu = reinterpret_cast(NaluSeprator); - uint spsPpsSize = spsBuffer.size() + ppsBuffer.size() + 8; + spsPpsSize = spsBuffer.size() + ppsBuffer.size() + 8; if (naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { frameSize = inH264ImageFrame->size(); @@ -382,44 +382,42 @@ uint8_t* DetailH264::AppendSizeInNaluSeprator(short naluType, frame_sp inH264Ima { frameSize = inH264ImageFrame->size() + spsPpsSize; } - uint8_t* newBuffer = new uint8_t[frameSize]; + spsPpsBuffer = new uint8_t[spsPpsSize + 4]; //add the size of sps to the 4th byte of sps's nalu seprator (00 00 00 SpsSize 67) - memcpy(newBuffer, nalu, 3); - newBuffer += 3; - newBuffer[0] = spsBuffer.size(); - newBuffer += 1; - memcpy(newBuffer, spsBuffer.data(), 
spsBuffer.size()); - newBuffer += spsBuffer.size(); + memcpy(spsPpsBuffer, nalu, 3); + spsPpsBuffer += 3; + spsPpsBuffer[0] = spsBuffer.size(); + spsPpsBuffer += 1; + memcpy(spsPpsBuffer, spsBuffer.data(), spsBuffer.size()); + spsPpsBuffer += spsBuffer.size(); //add the size of sps to the 4th byte of pps's nalu seprator (00 00 00 PpsSize 68) - memcpy(newBuffer, nalu, 3); - newBuffer += 3; - newBuffer[0] = ppsBuffer.size(); - newBuffer += 1; - memcpy(newBuffer, ppsBuffer.data(), ppsBuffer.size()); - newBuffer += ppsBuffer.size(); + memcpy(spsPpsBuffer, nalu, 3); + spsPpsBuffer += 3; + spsPpsBuffer[0] = ppsBuffer.size(); + spsPpsBuffer += 1; + memcpy(spsPpsBuffer, ppsBuffer.data(), ppsBuffer.size()); + spsPpsBuffer += ppsBuffer.size(); //add the size of I frame to the I frame's nalu seprator - newBuffer[0] = (frameSize - spsPpsSize - 4 >> 24) & 0xFF; - newBuffer[1] = (frameSize - spsPpsSize - 4 >> 16) & 0xFF; - newBuffer[2] = (frameSize - spsPpsSize - 4 >> 8) & 0xFF; - newBuffer[3] = frameSize - spsPpsSize - 4 & 0xFF; - newBuffer += 4; + spsPpsBuffer[0] = (frameSize - spsPpsSize - 4 >> 24) & 0xFF; + spsPpsBuffer[1] = (frameSize - spsPpsSize - 4 >> 16) & 0xFF; + spsPpsBuffer[2] = (frameSize - spsPpsSize - 4 >> 8) & 0xFF; + spsPpsBuffer[3] = frameSize - spsPpsSize - 4 & 0xFF; - uint8_t* tempBuffer = reinterpret_cast(inH264ImageFrame->data()); + frameData = reinterpret_cast(inH264ImageFrame->data()); if (naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { - tempBuffer = tempBuffer + spsPpsSize + 4; + frameData = frameData + spsPpsSize + 4; + frameSize = frameSize - spsPpsSize - 4; } else if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE) { - tempBuffer = tempBuffer + 4; + frameData = frameData + 4; + frameSize -= 4; } - //copy I frame data to the buffer - memcpy(newBuffer, tempBuffer, frameSize - spsPpsSize - 4); - //set the pointer to the starting of frame - newBuffer -= spsPpsSize + 4; - return newBuffer; + spsPpsBuffer = spsPpsBuffer - spsPpsSize; + spsPpsSize += 4; } 
bool DetailH264::write(frame_container& frames) @@ -434,6 +432,7 @@ bool DetailH264::write(frame_container& frames) auto mFrameBuffer = const_buffer(inH264ImageFrame->data(), inH264ImageFrame->size()); auto ret = H264Utils::parseNalu(mFrameBuffer); + short typeFound; tie(typeFound, spsBuff, ppsBuff) = ret; if ((spsBuff.size() !=0 ) || (ppsBuff.size() != 0)) @@ -442,10 +441,10 @@ bool DetailH264::write(frame_container& frames) spsBuffer = spsBuff; ppsBuffer = ppsBuff; } - + auto naluType = H264Utils::getNALUType((char*)mFrameBuffer.data()); std::string _nextFrameFileName; mWriterSinkUtils.getFilenameForNextFrame(_nextFrameFileName,inH264ImageFrame->timestamp, mProps->baseFolder, - mProps->chunkTime, mProps->syncTimeInSecs, syncFlag,mFrameType, typeFound); + mProps->chunkTime, mProps->syncTimeInSecs, syncFlag,mFrameType, naluType); if (_nextFrameFileName == "") { @@ -453,38 +452,49 @@ bool DetailH264::write(frame_container& frames) return false; } - uint8_t* frameData = reinterpret_cast(inH264ImageFrame->data()); - // assign size of the frame to the NALU seperator for playability in default players - frameData[0] = (inH264ImageFrame->size() - 4 >> 24) & 0xFF; - frameData[1] = (inH264ImageFrame->size() - 4 >> 16) & 0xFF; - frameData[2] = (inH264ImageFrame->size() - 4 >> 8) & 0xFF; - frameData[3] = inH264ImageFrame->size() - 4 & 0xFF; - - mux_sample.buffer = frameData; - mux_sample.len = inH264ImageFrame->size(); - auto naluType = H264Utils::getNALUType((char*)mFrameBuffer.data()); + uint8_t* spsPpsBuffer = nullptr; + size_t spsPpsSize; + uint8_t* frameData = nullptr; size_t frameSize; if (mNextFrameFileName != _nextFrameFileName) { mNextFrameFileName = _nextFrameFileName; initNewMp4File(mNextFrameFileName); - if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE) + if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { - //add sps and pps to I-frame and change the Nalu seprator according to Mp4 format - auto newBuffer = 
AppendSizeInNaluSeprator(naluType, inH264ImageFrame, frameSize); - mux_sample.buffer = newBuffer; + // new video + modifyFrameOnNewSPSPPS(naluType, inH264ImageFrame, spsPpsBuffer, spsPpsSize, frameData, frameSize); + prepend_buffer.buffer = spsPpsBuffer; + prepend_buffer.len = spsPpsSize; + mux_sample.buffer = frameData; mux_sample.len = frameSize; } } - - if (naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + else if (naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { - //change the Nalu seprator according to Mp4 format - auto newBuffer = AppendSizeInNaluSeprator(naluType, inH264ImageFrame, frameSize); - mux_sample.buffer = newBuffer; + // new sps pps + modifyFrameOnNewSPSPPS(naluType, inH264ImageFrame, spsPpsBuffer, spsPpsSize, frameData, frameSize); + prepend_buffer.buffer = spsPpsBuffer; + prepend_buffer.len = spsPpsSize; + mux_sample.buffer = frameData; mux_sample.len = frameSize; } + else + { + uint8_t* naluData = new uint8_t[4]; + // assign size of the frame to the NALU seperator for playability in default players + naluData[0] = (inH264ImageFrame->size() - 4 >> 24) & 0xFF; + naluData[1] = (inH264ImageFrame->size() - 4 >> 16) & 0xFF; + naluData[2] = (inH264ImageFrame->size() - 4 >> 8) & 0xFF; + naluData[3] = inH264ImageFrame->size() - 4 & 0xFF; + prepend_buffer.buffer = naluData; + prepend_buffer.len = 4; + + uint8_t* frameData = static_cast(inH264ImageFrame->data()); + mux_sample.buffer = frameData + 4; + mux_sample.len = inH264ImageFrame->size() - 4; + } if (syncFlag) { @@ -493,14 +503,12 @@ bool DetailH264::write(frame_container& frames) syncFlag = false; } - if (typeFound == H264Utils::H264_NAL_TYPE::H264_NAL_TYPE_IDR_SLICE) + isKeyFrame = false; + + if (naluType == H264Utils::H264_NAL_TYPE::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE::H264_NAL_TYPE_SEQ_PARAM) { isKeyFrame = true; } - else - { - isKeyFrame = false; - } addMetadataInVideoHeader(inH264ImageFrame); @@ -519,7 +527,7 @@ bool DetailH264::write(frame_container& frames) 
lastFrameTS = inH264ImageFrame->timestamp; mux_sample.dts = mux_sample.dts + static_cast((params.timescale / 1000) * diffInMsecs); - mp4_mux_track_add_sample(mux, videotrack, &mux_sample); + mp4_mux_track_add_sample_with_prepend_buffer(mux, videotrack, &prepend_buffer, &mux_sample); if (metatrack != -1 && mMetadataEnabled) { @@ -596,7 +604,7 @@ bool Mp4WriterSink::validateInputOutputPins() bool Mp4WriterSink::validateInputPins() { - if (getNumberOfInputPins() > 2) + if (getNumberOfInputPins() > 5) { LOG_ERROR << "<" << getId() << ">::validateInputPins size is expected to be 2. Actual<" << getNumberOfInputPins() << ">"; return false; @@ -722,5 +730,4 @@ bool Mp4WriterSink::handlePropsChange(frame_sp& frame) void Mp4WriterSink::setProps(Mp4WriterSinkProps& props) { Module::addPropsToQueue(props); -} - +} \ No newline at end of file diff --git a/base/src/NVRControlModule.cpp b/base/src/NVRControlModule.cpp new file mode 100644 index 000000000..1640fe5ef --- /dev/null +++ b/base/src/NVRControlModule.cpp @@ -0,0 +1,240 @@ +#include +#include +#include "NVRControlModule.h" +#include "Mp4WriterSink.h" +#include "Module.h" +#include "Command.h" + +class NVRControlModule::Detail +{ +public: + Detail(NVRControlModuleProps& _props) : mProps(_props) + { + } + + ~Detail() + { + } + void setProps(NVRControlModuleProps _props) + { + mProps = _props; + } + NVRControlModuleProps mProps; +}; + + +NVRControlModule::NVRControlModule(NVRControlModuleProps _props) + :AbsControlModule(_props) +{ + mDetail.reset(new Detail(_props)); +} + +NVRControlModule::~NVRControlModule() {} + +bool NVRControlModule::validateInputPins() +{ + return true; +} + +bool NVRControlModule::validateOutputPins() +{ + return true; +} + +bool NVRControlModule::validateInputOutputPins() +{ + return true; +} + +bool NVRControlModule::handleCommand(Command::CommandType type, frame_sp& frame) +{ + if (type == Command::CommandType::NVRCommandView) + { + NVRCommandView cmd; + getCommand(cmd, frame); + if(cmd.doView == 
false) + { + MultimediaQueueXformCommand cmd; + pausedTS = currentRenderTS; + cmd.startTime = pausedTS; + cmd.endTime = pausedTS + 10; + + EglRendererCreateWindow comd; + if(isRenderWindowOpen == false) + { + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("Renderer_2")) // Sending command to multimediaQueue + { + auto myid = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(comd); + } + } + isRenderWindowOpen = true; + + } + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue + { + auto myid = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(cmd); + } + } + + } + return true; + } + if (type == Command::CommandType::NVRCommandExportView) + { + LOG_ERROR<<" I AM IN EXPORT VIEW"; + NVRCommandExportView cmd; + getCommand(cmd, frame); + givenStart = cmd.startViewTS; + givenStop = cmd.stopViewTS; + if(pausedTS < firstMMQtimestamp) + { + LOG_ERROR<<" The seeked start time is in disk!!"; + Mp4SeekCommand command; + command.seekStartTS = currentRenderTS + 50; + command.forceReopen = false; + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("Reader_1")) // Sending command to reader + { + auto myId = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(command); + pipelineModules[i]->play(true); + return true; + } + } + } + else + { + LOG_ERROR<<" The seeked start time is in MULTIMEDIA-QUEUE!!"; + MultimediaQueueXformCommand cmd; + cmd.startTime = currentRenderTS + 50; + cmd.endTime = currentRenderTS + 100000; + for (int i = 0; i < pipelineModules.size(); i++) + { + if (pipelineModules[i] == getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue + { + auto myid = pipelineModules[i]->getId(); + pipelineModules[i]->queueCommand(cmd); + } + } + } + + return true; + + } + + if (type == Command::CommandType::MMQtimestamps) + 
{ + MMQtimestamps cmd; + getCommand(cmd, frame); + firstMMQtimestamp = cmd.firstTimeStamp; + lastMMQtimestamp = cmd.lastTimeStamp; + return true; + } + + if (type == Command::CommandType::Rendertimestamp) + { + Rendertimestamp cmd; + getCommand(cmd, frame); + currentRenderTS = cmd.currentTimeStamp; + //LOG_ERROR<<"currentRenderTS is " <mProps); + auto ret = Module::handlePropsChange(frame, props); + mDetail->setProps(props); + return ret; +} + +bool NVRControlModule::init() +{ + if (!Module::init()) + { + return false; + } + return true; +} + +bool NVRControlModule::term() +{ + return Module::term(); +} + +NVRControlModuleProps NVRControlModule::getProps() +{ + fillProps(mDetail->mProps); + return mDetail->mProps; +} + +void NVRControlModule::setProps(NVRControlModuleProps& props) +{ + Module::addPropsToQueue(props); +} + +bool NVRControlModule::validateModuleRoles() +{ + for (int i = 0; i < pipelineModules.size(); i++) + { + bool modPresent = false; + for (auto it = moduleRoles.begin(); it != moduleRoles.end(); it++) + { + if (pipelineModules[i] == it->second) + { + modPresent = true; + } + } + if (!modPresent) + { + LOG_ERROR << "Modules and roles validation failed!!"; + } + } + return true; +} + +bool NVRControlModule::nvrRecord(bool record) +{ + NVRCommandRecord cmd; + cmd.doRecording = record; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrExport(uint64_t ts, uint64_t te) +{ + NVRCommandExport cmd; + cmd.startExportTS = ts; + cmd.stopExportTS = te; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrExportView(uint64_t ts, uint64_t te) +{ + NVRCommandExportView cmd; + cmd.startViewTS = ts; + cmd.stopViewTS = te; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrView(bool view) +{ + NVRCommandView cmd; + cmd.doView = view; + return queueCommand(cmd); +} + +bool NVRControlModule::nvrGoLive() +{ + NVRGoLive cmd; + return queueCommand(cmd); +} \ No newline at end of file diff --git a/base/src/NvEglRenderer.cpp 
b/base/src/NvEglRenderer.cpp index f1d5c12e7..07ff7a27c 100644 --- a/base/src/NvEglRenderer.cpp +++ b/base/src/NvEglRenderer.cpp @@ -104,6 +104,7 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, y_offset = 0; } + window_attributes.override_redirect = 0; depth = DefaultDepth(x_display, DefaultScreen(x_display)); @@ -136,7 +137,7 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, if(window_attributes.override_redirect == 0) { - XStoreName(x_display, x_window, "ApraEglRenderer"); + XStoreName(x_display, x_window, "LIVE WINDOW"); XFlush(x_display); XSizeHints hints; @@ -147,9 +148,24 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, hints.flags = PPosition | PSize; XSetWMNormalHints(x_display, x_window, &hints); - WM_HINTS = XInternAtom(x_display, "_MOTIF_WM_HINTS", True); - XChangeProperty(x_display, x_window, WM_HINTS, WM_HINTS, 32, - PropModeReplace, (unsigned char *)&WM_HINTS, 5); + // Set Motif hints for window manager + Atom _MOTIF_WM_HINTS = XInternAtom(x_display, "_MOTIF_WM_HINTS", True); + if (_MOTIF_WM_HINTS != None) + { + struct + { + unsigned long flags; + unsigned long functions; + unsigned long decorations; + long inputMode; + unsigned long status; + } WM_HINTS = { (1L << 1), 0, 1, 0, 0 }; // Setting decorations to 1 adds title bar + XChangeProperty(x_display, x_window, _MOTIF_WM_HINTS, _MOTIF_WM_HINTS, 32, + PropModeReplace, (unsigned char *)&WM_HINTS, 5); + } + + Atom WM_DELETE_WINDOW = XInternAtom(x_display, "WM_DELETE_WINDOW", False); + XSetWMProtocols(x_display, x_window, &WM_DELETE_WINDOW, 1); } XSelectInput(x_display, (int32_t) x_window, ExposureMask); diff --git a/base/src/RTSPClientSrc.cpp b/base/src/RTSPClientSrc.cpp index 2902cc1f0..c88e4afad 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -221,5 +221,12 @@ bool RTSPClientSrc::validateOutputPins() { return this->getNumberOfOutputPins() > 0; } void 
RTSPClientSrc::notifyPlay(bool play) {} -bool RTSPClientSrc::handleCommand(Command::CommandType type, frame_sp& frame) { return true; } +bool RTSPClientSrc::handleCommand(Command::CommandType type, frame_sp& frame) +{ + if (type == Command::CommandType::Relay) + { + return Module::handleCommand(type, frame); + } + return true; +} bool RTSPClientSrc::handlePropsChange(frame_sp& frame) { return true; } diff --git a/base/vcpkg.json b/base/vcpkg.json index 16df1ad20..8bbc7870f 100644 --- a/base/vcpkg.json +++ b/base/vcpkg.json @@ -2,7 +2,7 @@ "$schema": "https://raw.githubusercontent.com/microsoft/vcpkg/master/scripts/vcpkg.schema.json", "name": "apra-pipes-cuda", "version": "0.0.1", - "builtin-baseline": "e839c8f19e3aa844ed0a6212e05349c90b85dde0", + "builtin-baseline": "356814e3b10f457f01d9dfdc45e1b2cac0ff6b60", "dependencies": [ { "name": "opencv4", diff --git a/vcpkg b/vcpkg index e839c8f19..356814e3b 160000 --- a/vcpkg +++ b/vcpkg @@ -1 +1 @@ -Subproject commit e839c8f19e3aa844ed0a6212e05349c90b85dde0 +Subproject commit 356814e3b10f457f01d9dfdc45e1b2cac0ff6b60 From 697474220b84d4f826e1312191a4d7dcae95f225 Mon Sep 17 00:00:00 2001 From: Venkat Date: Thu, 26 Oct 2023 12:34:12 +0530 Subject: [PATCH 08/19] Thumnail generatormodule --- base/CMakeLists.txt | 3 + base/include/NVRControlModule.h | 66 ------- base/include/ThumbnailListGenerator.h | 56 ++++++ base/src/NVRControlModule.cpp | 240 ------------------------- base/src/ThumbnailListGenerator.cpp | 229 +++++++++++++++++++++++ base/test/thumbnailgenerator_tests.cpp | 82 +++++++++ 6 files changed, 370 insertions(+), 306 deletions(-) delete mode 100644 base/include/NVRControlModule.h create mode 100644 base/include/ThumbnailListGenerator.h delete mode 100644 base/src/NVRControlModule.cpp create mode 100644 base/src/ThumbnailListGenerator.cpp create mode 100644 base/test/thumbnailgenerator_tests.cpp diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt index 0a002c206..c710af12b 100755 --- a/base/CMakeLists.txt +++ 
b/base/CMakeLists.txt @@ -282,6 +282,7 @@ SET(IP_FILES src/OverlayFactory.cpp src/TestSignalGeneratorSrc.cpp src/AbsControlModule.cpp + src/ThumbnailListGenerator.cpp ) @@ -308,6 +309,7 @@ SET(IP_FILES_H include/TextOverlayXForm.h include/ColorConversionXForm.h include/Overlay.h + include/ThumbnailListGenerator.h ) @@ -564,6 +566,7 @@ SET(UT_FILES test/overlaymodule_tests.cpp test/testSignalGeneratorSrc_tests.cpp test/abscontrolmodule_tests.cpp + test/thumbnailgenerator_tests.cpp ${ARM64_UT_FILES} ${CUDA_UT_FILES} ) diff --git a/base/include/NVRControlModule.h b/base/include/NVRControlModule.h deleted file mode 100644 index c9dfe46e6..000000000 --- a/base/include/NVRControlModule.h +++ /dev/null @@ -1,66 +0,0 @@ -#pragma once -#include "Module.h" -#include "AbsControlModule.h" - -class NVRControlModuleProps : public AbsControlModuleProps -{ -public: - NVRControlModuleProps() - { - } - size_t getSerializeSize() - { - return ModuleProps::getSerializeSize(); - } -private: - friend class boost::serialization::access; - - template - void serialize(Archive& ar, const unsigned int version) - { - ar& boost::serialization::base_object(*this); - } -}; - -class NVRControlModule : public AbsControlModule -{ - public: - NVRControlModule(NVRControlModuleProps _props); - ~NVRControlModule(); - bool init(); - bool term(); - void setProps(NVRControlModuleProps& props); - NVRControlModuleProps getProps(); - bool validateModuleRoles(); - bool nvrRecord(bool record); - bool nvrExport(uint64_t startTime, uint64_t stopTime); - bool nvrExportView(uint64_t startTime, uint64_t stopTime); - bool nvrView(bool view); - bool nvrGoLive(); - bool isRendererPaused = false; - uint64_t pausedTS = 0; - uint64_t mp4lastWrittenTS = 0; - uint64_t firstMMQtimestamp = 0; - uint64_t lastMMQtimestamp = 0; - uint64_t givenStart = 0; - uint64_t givenStop = 0; - uint64_t mp4_2_lastWrittenTS = 0; - bool isExporting = false; - bool isRenderWindowOpen = true; - bool isStateLive = true; - uint64_t currentRenderTS 
= 0; - bool isSavingVideo = false; - bool isExpWriterInitialized = true; - -protected: - bool validateInputPins(); - bool validateOutputPins(); - bool validateInputOutputPins(); - bool handleCommand(Command::CommandType type, frame_sp& frame); - bool handlePropsChange(frame_sp& frame); - -private: - void setMetadata(framemetadata_sp& metadata); - class Detail; - boost::shared_ptr mDetail; -}; \ No newline at end of file diff --git a/base/include/ThumbnailListGenerator.h b/base/include/ThumbnailListGenerator.h new file mode 100644 index 000000000..9d57b6109 --- /dev/null +++ b/base/include/ThumbnailListGenerator.h @@ -0,0 +1,56 @@ +#pragma once +#include "Module.h" + +class ThumbnailListGeneratorProps : public ModuleProps +{ +public: + ThumbnailListGeneratorProps(int _thumbnailWidth, int _thumbnailHeight, std::string _fileToStore) : ModuleProps() + { + thumbnailWidth = _thumbnailWidth; + thumbnailHeight = _thumbnailHeight; + fileToStore = _fileToStore; + } + + int thumbnailWidth; + int thumbnailHeight; + std::string fileToStore; + + size_t getSerializeSize() + { + return ModuleProps::getSerializeSize() + sizeof(int) * 2 + sizeof(fileToStore); + } + +private: + friend class boost::serialization::access; + + template + void serialize(Archive &ar, const unsigned int version) + { + ar &boost::serialization::base_object(*this); + ar &thumbnailWidth; + ar &thumbnailHeight; + ar &fileToStore; + } +}; +class ThumbnailListGenerator : public Module +{ + +public: + ThumbnailListGenerator(ThumbnailListGeneratorProps _props); + virtual ~ThumbnailListGenerator(); + bool init(); + bool term(); + void setProps(ThumbnailListGeneratorProps &props); + ThumbnailListGeneratorProps getProps(); + +protected: + bool process(frame_container &frames); + bool validateInputPins(); + // bool processSOS(frame_sp &frame); + // bool shouldTriggerSOS(); + bool handlePropsChange(frame_sp &frame); + +private: + class Detail; + boost::shared_ptr mDetail; +}; diff --git a/base/src/NVRControlModule.cpp 
b/base/src/NVRControlModule.cpp deleted file mode 100644 index 1640fe5ef..000000000 --- a/base/src/NVRControlModule.cpp +++ /dev/null @@ -1,240 +0,0 @@ -#include -#include -#include "NVRControlModule.h" -#include "Mp4WriterSink.h" -#include "Module.h" -#include "Command.h" - -class NVRControlModule::Detail -{ -public: - Detail(NVRControlModuleProps& _props) : mProps(_props) - { - } - - ~Detail() - { - } - void setProps(NVRControlModuleProps _props) - { - mProps = _props; - } - NVRControlModuleProps mProps; -}; - - -NVRControlModule::NVRControlModule(NVRControlModuleProps _props) - :AbsControlModule(_props) -{ - mDetail.reset(new Detail(_props)); -} - -NVRControlModule::~NVRControlModule() {} - -bool NVRControlModule::validateInputPins() -{ - return true; -} - -bool NVRControlModule::validateOutputPins() -{ - return true; -} - -bool NVRControlModule::validateInputOutputPins() -{ - return true; -} - -bool NVRControlModule::handleCommand(Command::CommandType type, frame_sp& frame) -{ - if (type == Command::CommandType::NVRCommandView) - { - NVRCommandView cmd; - getCommand(cmd, frame); - if(cmd.doView == false) - { - MultimediaQueueXformCommand cmd; - pausedTS = currentRenderTS; - cmd.startTime = pausedTS; - cmd.endTime = pausedTS + 10; - - EglRendererCreateWindow comd; - if(isRenderWindowOpen == false) - { - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("Renderer_2")) // Sending command to multimediaQueue - { - auto myid = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(comd); - } - } - isRenderWindowOpen = true; - - } - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue - { - auto myid = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(cmd); - } - } - - } - return true; - } - if (type == Command::CommandType::NVRCommandExportView) - { - LOG_ERROR<<" I AM IN EXPORT VIEW"; - 
NVRCommandExportView cmd; - getCommand(cmd, frame); - givenStart = cmd.startViewTS; - givenStop = cmd.stopViewTS; - if(pausedTS < firstMMQtimestamp) - { - LOG_ERROR<<" The seeked start time is in disk!!"; - Mp4SeekCommand command; - command.seekStartTS = currentRenderTS + 50; - command.forceReopen = false; - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("Reader_1")) // Sending command to reader - { - auto myId = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(command); - pipelineModules[i]->play(true); - return true; - } - } - } - else - { - LOG_ERROR<<" The seeked start time is in MULTIMEDIA-QUEUE!!"; - MultimediaQueueXformCommand cmd; - cmd.startTime = currentRenderTS + 50; - cmd.endTime = currentRenderTS + 100000; - for (int i = 0; i < pipelineModules.size(); i++) - { - if (pipelineModules[i] == getModuleofRole("MultimediaQueue")) // Sending command to multimediaQueue - { - auto myid = pipelineModules[i]->getId(); - pipelineModules[i]->queueCommand(cmd); - } - } - } - - return true; - - } - - if (type == Command::CommandType::MMQtimestamps) - { - MMQtimestamps cmd; - getCommand(cmd, frame); - firstMMQtimestamp = cmd.firstTimeStamp; - lastMMQtimestamp = cmd.lastTimeStamp; - return true; - } - - if (type == Command::CommandType::Rendertimestamp) - { - Rendertimestamp cmd; - getCommand(cmd, frame); - currentRenderTS = cmd.currentTimeStamp; - //LOG_ERROR<<"currentRenderTS is " <mProps); - auto ret = Module::handlePropsChange(frame, props); - mDetail->setProps(props); - return ret; -} - -bool NVRControlModule::init() -{ - if (!Module::init()) - { - return false; - } - return true; -} - -bool NVRControlModule::term() -{ - return Module::term(); -} - -NVRControlModuleProps NVRControlModule::getProps() -{ - fillProps(mDetail->mProps); - return mDetail->mProps; -} - -void NVRControlModule::setProps(NVRControlModuleProps& props) -{ - Module::addPropsToQueue(props); -} - -bool 
NVRControlModule::validateModuleRoles() -{ - for (int i = 0; i < pipelineModules.size(); i++) - { - bool modPresent = false; - for (auto it = moduleRoles.begin(); it != moduleRoles.end(); it++) - { - if (pipelineModules[i] == it->second) - { - modPresent = true; - } - } - if (!modPresent) - { - LOG_ERROR << "Modules and roles validation failed!!"; - } - } - return true; -} - -bool NVRControlModule::nvrRecord(bool record) -{ - NVRCommandRecord cmd; - cmd.doRecording = record; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrExport(uint64_t ts, uint64_t te) -{ - NVRCommandExport cmd; - cmd.startExportTS = ts; - cmd.stopExportTS = te; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrExportView(uint64_t ts, uint64_t te) -{ - NVRCommandExportView cmd; - cmd.startViewTS = ts; - cmd.stopViewTS = te; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrView(bool view) -{ - NVRCommandView cmd; - cmd.doView = view; - return queueCommand(cmd); -} - -bool NVRControlModule::nvrGoLive() -{ - NVRGoLive cmd; - return queueCommand(cmd); -} \ No newline at end of file diff --git a/base/src/ThumbnailListGenerator.cpp b/base/src/ThumbnailListGenerator.cpp new file mode 100644 index 000000000..bec2b4a5c --- /dev/null +++ b/base/src/ThumbnailListGenerator.cpp @@ -0,0 +1,229 @@ +#include "ThumbnailListGenerator.h" +#include "FrameMetadata.h" +#include "ImageMetadata.h" +#include "RawImageMetadata.h" +#include "RawImagePlanarMetadata.h" +#include "FrameMetadataFactory.h" +#include "Frame.h" +#include "Logger.h" +#include +#include +#include "Utils.h" +#include +#include +#include +#include +#include +#include +#include + +#include "DMAFDWrapper.h" +#include "DMAFrameUtils.h" + +class ThumbnailListGenerator::Detail +{ + +public: + Detail(ThumbnailListGeneratorProps &_props) : mProps(_props) + { + mOutSize = cv::Size(mProps.thumbnailWidth, mProps.thumbnailHeight); + enableSOS = true; + flags.push_back(cv::IMWRITE_JPEG_QUALITY); + flags.push_back(90); + } + + 
~Detail() {} + + void initMatImages(framemetadata_sp &input) + { + mIImg = Utils::getMatHeader(FrameMetadataFactory::downcast(input)); + } + + void setProps(ThumbnailListGeneratorProps &props) + { + mProps = props; + } + + cv::Mat mIImg; + cv::Size mOutSize; + bool enableSOS; + ThumbnailListGeneratorProps mProps; + int m_width; + int m_height; + int m_step; + cv::Mat m_tempImage; + int count = 0; + vector flags; +}; + +ThumbnailListGenerator::ThumbnailListGenerator(ThumbnailListGeneratorProps _props) : Module(SINK, "ThumbnailListGenerator", _props) +{ + mDetail.reset(new Detail(_props)); +} + +ThumbnailListGenerator::~ThumbnailListGenerator() {} + +bool ThumbnailListGenerator::validateInputPins() +{ + // if (getNumberOfInputPins() != 1) + // { + // LOG_ERROR << "<" << getId() << ">::validateInputPins size is expected to be 1. Actual<" << getNumberOfInputPins() << ">"; + // return false; + // } + + framemetadata_sp metadata = getFirstInputMetadata(); + FrameMetadata::FrameType frameType = metadata->getFrameType(); + if (frameType != FrameMetadata::RAW_IMAGE_PLANAR) + { + LOG_ERROR << "<" << getId() << ">::validateInputPins input frameType is expected to be RAW_IMAGE. 
Actual<" << frameType << ">"; + return false; + } + + return true; +} + +bool ThumbnailListGenerator::init() +{ + if (!Module::init()) + { + return false; + } + return true; +} + +bool ThumbnailListGenerator::term() +{ + return Module::term(); +} + +bool ThumbnailListGenerator::process(frame_container &frames) +{ + auto frame = getFrameByType(frames, FrameMetadata::RAW_IMAGE_PLANAR); + if (isFrameEmpty(frame)) + { + LOG_ERROR << "Got Empty Frames will return from here "; + return true; + } + + // ImagePlanes mImagePlanes; + // DMAFrameUtils::GetImagePlanes mGetImagePlanes; + // int mNumPlanes = 0; + + framemetadata_sp frameMeta = frame->getMetadata(); + + // mGetImagePlanes = DMAFrameUtils::getImagePlanesFunction(frameMeta, mImagePlanes); + // mNumPlanes = static_cast(mImagePlanes.size()); + + // mGetImagePlanes(frame, mImagePlanes); + + // uint8_t* dstPtr = (uint8_t*) malloc(frameMeta->getDataSize()); + // for (auto i = 0; i < mNumPlanes; i++) + // { + // mImagePlanes[i]->mCopyToData(mImagePlanes[i].get(), dstPtr); + // dstPtr += mImagePlanes[i]->imageSize; + // } + + // FrameMetadata::FrameType fType = frameMeta->getFrameType(); + + // uint8_t* dstPtr = (uint8_t*) malloc(frame->size()); + // auto frameSize = frame->size(); + + // dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtrY(); + // dstPtr += frameSize / 2; + // dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtrU(); + // dstPtr += frameSize / 4; + // dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtrV(); + // dstPtr += frameSize / 4; + // dstPtr -= frameSize; + + auto dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtr(); + + auto rawPlanarMetadata = FrameMetadataFactory::downcast(frameMeta); + auto height = rawPlanarMetadata->getHeight(0); + auto width = rawPlanarMetadata->getWidth(0); + LOG_ERROR << "width = "<< width; + LOG_ERROR << "height = "<< height; + auto st = rawPlanarMetadata->getStep(0); + uint8_t data = 0; + cv::Mat bgrImage; + auto yuvImage = cv::Mat(height * 
1.5, width, CV_8UC1, static_cast(&data)); + yuvImage.data = static_cast(dstPtr); + cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGRA_NV12); + + cv::Mat bgrImageResized; + auto newSize = cv::Size(1000, 1000); + + cv::resize(bgrImage, bgrImageResized, newSize); + + unsigned char* frame_buffer = (unsigned char*)bgrImageResized.data; + struct jpeg_compress_struct cinfo; + struct jpeg_error_mgr jerr; + + JSAMPROW row_pointer[1]; + FILE* outfile = fopen(mDetail->mProps.fileToStore.c_str(), "wb"); + if (!outfile) + { + LOG_ERROR << "Couldn't open file" << mDetail->mProps.fileToStore.c_str(); + return false; + } + mDetail->count = mDetail->count + 1; + cinfo.err = jpeg_std_error(&jerr); + jpeg_create_compress(&cinfo); + jpeg_stdio_dest(&cinfo, outfile); + + // Set the image dimensions and color space + cinfo.image_width = 1000; + cinfo.image_height = 1000; + cinfo.input_components = 4; + cinfo.in_color_space = JCS_EXT_BGRA; + + // Set the JPEG compression parameters + jpeg_set_defaults(&cinfo); + jpeg_set_quality(&cinfo, 80, TRUE); + + // Start the compression process + jpeg_start_compress(&cinfo, TRUE); + // Loop over the image rows + while (cinfo.next_scanline < cinfo.image_height) + { + // Get a pointer to the current row + row_pointer[0] = &frame_buffer[cinfo.next_scanline * 1000 * 4]; + if (row_pointer && &cinfo) + { + // Compress the row + jpeg_write_scanlines(&cinfo, row_pointer, 1); + } + else + { + LOG_ERROR << "COULDN'T WRITE ......................................."; + } + } + + // Finish the compression process + jpeg_finish_compress(&cinfo); + + // Clean up the JPEG compression object and close the output file + jpeg_destroy_compress(&cinfo); + fclose(outfile); + LOG_ERROR << "wrote thumbail"; + return true; +} + +void ThumbnailListGenerator::setProps(ThumbnailListGeneratorProps &props) +{ + Module::addPropsToQueue(props); +} + +ThumbnailListGeneratorProps ThumbnailListGenerator::getProps() +{ + fillProps(mDetail->mProps); + return mDetail->mProps; +} + +bool 
ThumbnailListGenerator::handlePropsChange(frame_sp &frame) +{ + ThumbnailListGeneratorProps props(0, 0, "s"); + bool ret = Module::handlePropsChange(frame, props); + mDetail->setProps(props); + return ret; +} \ No newline at end of file diff --git a/base/test/thumbnailgenerator_tests.cpp b/base/test/thumbnailgenerator_tests.cpp new file mode 100644 index 000000000..287e50278 --- /dev/null +++ b/base/test/thumbnailgenerator_tests.cpp @@ -0,0 +1,82 @@ +#include "ThumbnailListGenerator.h" +#include "FileReaderModule.h" +#include +#include "RTSPClientSrc.h" +#include "PipeLine.h" +#include "H264Decoder.h" +#include "H264Metadata.h" +#include "test_utils.h" + +BOOST_AUTO_TEST_SUITE(thumbnailgenerator_tests) + +struct rtsp_client_tests_data { + rtsp_client_tests_data() + { + outFile = string("./data/testOutput/bunny.h264"); + Test_Utils::FileCleaner fc; + fc.pathsOfFiles.push_back(outFile); //clear any occurance before starting the tests + } + string outFile; + string empty; +}; + +BOOST_AUTO_TEST_CASE(basic) +{ + auto fileReader = boost::shared_ptr(new FileReaderModule(FileReaderModuleProps("./data/YUV_420_planar.raw"))); + auto metadata = framemetadata_sp(new RawImagePlanarMetadata(1280, 720, ImageMetadata::ImageType::YUV420, size_t(0), CV_8U)); + auto rawImagePin = fileReader->addOutputPin(metadata); + + auto m_thumbnailGenerator = boost::shared_ptr(new ThumbnailListGenerator(ThumbnailListGeneratorProps(180, 180, "./data/thumbnail.jpg"))); + fileReader->setNext(m_thumbnailGenerator); + + fileReader->init(); + m_thumbnailGenerator->init(); + + fileReader->play(true); + + fileReader->step(); + m_thumbnailGenerator->step(); + + fileReader->term(); +} + +BOOST_AUTO_TEST_CASE(basic_) +{ + rtsp_client_tests_data d; + + //drop bunny/mp4 into evostream folder, + //also set it up for RTSP client authentication as shown here: https://sites.google.com/apra.in/development/home/evostream/rtsp-authentication?authuser=1 + auto url=string("rtsp://10.102.10.77/axis-media/media.amp"); 
+ + auto m = boost::shared_ptr(new RTSPClientSrc(RTSPClientSrcProps(url, d.empty, d.empty))); + auto meta = framemetadata_sp(new H264Metadata()); + m->addOutputPin(meta); + + auto Decoder = boost::shared_ptr(new H264Decoder(H264DecoderProps())); + m->setNext(Decoder); + + auto m_thumbnailGenerator = boost::shared_ptr(new ThumbnailListGenerator(ThumbnailListGeneratorProps(180, 180, "./data/thumbnail.jpg"))); + Decoder->setNext(m_thumbnailGenerator); + + boost::shared_ptr p; + p = boost::shared_ptr(new PipeLine("test")); + p->appendModule(m); + + if (!p->init()) + { + throw AIPException(AIP_FATAL, "Engine Pipeline init failed. Check IPEngine Logs for more details."); + } + + p->run_all_threaded(); + + Test_Utils::sleep_for_seconds(15); + + p->stop(); + p->term(); + p->wait_for_all(); + p.reset(); +} + + + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file From 051d04af818951919b29aae04b8cff04c71d29bd Mon Sep 17 00:00:00 2001 From: Venkat Date: Thu, 26 Oct 2023 00:24:21 -0700 Subject: [PATCH 09/19] added ifdef condition in thumbnaillistgenerator --- base/src/ThumbnailListGenerator.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/base/src/ThumbnailListGenerator.cpp b/base/src/ThumbnailListGenerator.cpp index bec2b4a5c..fda936ba4 100644 --- a/base/src/ThumbnailListGenerator.cpp +++ b/base/src/ThumbnailListGenerator.cpp @@ -17,8 +17,10 @@ #include #include +#if defined(__arm__) || defined(__aarch64__) #include "DMAFDWrapper.h" #include "DMAFrameUtils.h" +#endif class ThumbnailListGenerator::Detail { @@ -98,6 +100,7 @@ bool ThumbnailListGenerator::term() bool ThumbnailListGenerator::process(frame_container &frames) { +#if defined(__arm__) || defined(__aarch64__) auto frame = getFrameByType(frames, FrameMetadata::RAW_IMAGE_PLANAR); if (isFrameEmpty(frame)) { @@ -206,6 +209,7 @@ bool ThumbnailListGenerator::process(frame_container &frames) jpeg_destroy_compress(&cinfo); fclose(outfile); LOG_ERROR << "wrote thumbail"; +#endif return true; } From 
8b53e569bae03688e786326bc7ea4f45f45f40d5 Mon Sep 17 00:00:00 2001 From: Venkat Date: Thu, 26 Oct 2023 15:11:19 +0530 Subject: [PATCH 10/19] Reverse play ApraPipes(zaki) --- base/include/BoundBuffer.h | 6 +- base/include/Command.h | 8 + base/include/H264Decoder.h | 43 ++- base/include/H264Metadata.h | 5 +- base/include/Module.h | 13 +- base/include/MultimediaQueueXform.h | 2 + base/src/H264Decoder.cpp | 492 ++++++++++++++++++++++++++-- base/src/H264DecoderV4L2Helper.cpp | 46 ++- base/src/H264DecoderV4L2Helper.h | 13 +- base/src/Module.cpp | 15 +- base/src/Mp4ReaderSource.cpp | 79 ++++- base/src/Mp4WriterSink.cpp | 4 +- base/src/Mp4WriterSinkUtils.cpp | 20 ++ base/src/MultimediaQueueXform.cpp | 142 ++++++-- base/src/OrderedCacheOfFiles.cpp | 2 - 15 files changed, 776 insertions(+), 114 deletions(-) diff --git a/base/include/BoundBuffer.h b/base/include/BoundBuffer.h index d993c35e0..78494db24 100755 --- a/base/include/BoundBuffer.h +++ b/base/include/BoundBuffer.h @@ -5,6 +5,7 @@ #include #include #include +#include using namespace boost::placeholders; template @@ -42,9 +43,10 @@ class bounded_buffer { // `param_type` represents the "best" way to pass a parameter of type `value_type` to a method. 
boost::mutex::scoped_lock lock(m_mutex); - //m_not_full.wait(lock, boost::bind(&bounded_buffer::is_ready_to_accept, this)); - if (is_not_full() && m_accept) + bool isCommandQueueNotFull = m_unread < m_capacity * 2; + if (m_accept && isCommandQueueNotFull) { + std::cout << "command pushed" << std::endl; m_container.push_back(item); ++m_unread; lock.unlock(); diff --git a/base/include/Command.h b/base/include/Command.h index 543cd33dd..3fbc0406e 100755 --- a/base/include/Command.h +++ b/base/include/Command.h @@ -269,6 +269,7 @@ class MultimediaQueueXformCommand : public Command int64_t startTime = 0; int64_t endTime = 0; + bool direction = true; private: friend class boost::serialization::access; @@ -278,6 +279,7 @@ class MultimediaQueueXformCommand : public Command ar& boost::serialization::base_object(*this); ar& startTime; ar& endTime; + ar& direction; } }; @@ -451,6 +453,9 @@ class NVRCommandExportView : public Command uint64_t startViewTS = 0; uint64_t stopViewTS = 0; + bool direction = true; + bool mp4ReaderExport = false; + bool onlyDirectionChange = false; private: friend class boost::serialization::access; @@ -460,6 +465,9 @@ class NVRCommandExportView : public Command ar& boost::serialization::base_object(*this); ar& startViewTS; ar& stopViewTS; + ar& direction; + ar& mp4ReaderExport; + ar& onlyDirectionChange; } }; diff --git a/base/include/H264Decoder.h b/base/include/H264Decoder.h index c35cebae5..2e8b2781a 100644 --- a/base/include/H264Decoder.h +++ b/base/include/H264Decoder.h @@ -1,11 +1,18 @@ #pragma once #include "Module.h" +#include class H264DecoderProps : public ModuleProps { public: - H264DecoderProps() {} + H264DecoderProps(uint _lowerWaterMark = 300, uint _upperWaterMark = 350) + { + lowerWaterMark = _lowerWaterMark; + upperWaterMark = _upperWaterMark; + } + uint lowerWaterMark; + uint upperWaterMark; }; class H264Decoder : public Module @@ -24,12 +31,46 @@ class H264Decoder : public Module bool validateInputPins(); bool validateOutputPins(); 
bool shouldTriggerSOS(); + void flushQue(); private: + void bufferDecodedFrames(frame_sp& frame); + void bufferBackwardEncodedFrames(frame_sp& frame, short naluType); + void bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short naluType); + class Detail; boost::shared_ptr mDetail; bool mShouldTriggerSOS; framemetadata_sp mOutputMetadata; std::string mOutputPinId; H264DecoderProps mProps; + + + /* Used to buffer multiple complete GOPs + note that we decode frames from this queue in reverse play*/ + std::deque> backwardGopBuffer; + /* buffers the incomplete GOP */ + std::deque latestBackwardGop; + /* It buffers only one latest GOP + used in cases where partial GOP maybe in cache and rest of the GOP needs to be decoded + note that since there is no buffering in forward play, we directly decode frames from module queue*/ + std::deque latestForwardGop; + std::map decodedFramesCache; + void sendDecodedFrame(); + bool mDirection; + bool dirChangedToFwd = false; + bool dirChangedToBwd = false; + bool foundIFrameOfReverseGop = false; + bool flushDecoderFlag = false; + bool decodePreviousFramesOfTheForwardGop = false; + bool prevFrameInCache = false; + void decodeFrameFromBwdGOP(); + std::deque incomingFramesTSQ; + void clearIncompleteBwdGopTsFromIncomingTSQ(std::deque& latestGop); + void saveSpsPps(frame_sp frame); + void* prependSpsPps(frame_sp& iFrame, size_t& spsPpsFrameSize); + void dropFarthestFromCurrentTs(uint64_t ts); + frame_sp mHeaderFrame; + boost::asio::const_buffer spsBuffer; + boost::asio::const_buffer ppsBuffer; }; diff --git a/base/include/H264Metadata.h b/base/include/H264Metadata.h index d59d6d2b6..d315f1858 100644 --- a/base/include/H264Metadata.h +++ b/base/include/H264Metadata.h @@ -40,9 +40,12 @@ class H264Metadata : public FrameMetadata width = metadata.width; height = metadata.height; + direction = metadata.direction; + mp4Seek = metadata.mp4Seek; //setDataSize(); } - + bool direction = true; + bool mp4Seek = false; protected: void initData(int 
_width, int _height, MemType _memType = MemType::HOST) { diff --git a/base/include/Module.h b/base/include/Module.h index 2fb5f585c..fa29ded2d 100644 --- a/base/include/Module.h +++ b/base/include/Module.h @@ -215,7 +215,7 @@ class Module { void setProps(ModuleProps& props); void fillProps(ModuleProps& props); template - void addPropsToQueue(T& props) + void addPropsToQueue(T& props, bool priority = false) { auto size = props.getSerializeSize(); auto frame = makeCommandFrame(size, mPropsChangeMetadata); @@ -225,7 +225,14 @@ class Module { // add to que frame_container frames; frames.insert(make_pair("props_change", frame)); - Module::push(frames); + if(!priority) + { + Module::push(frames); + } + else + { + Module::push_back(frames); + } } virtual bool handlePropsChange(frame_sp& frame); virtual bool handleCommand(Command::CommandType type, frame_sp& frame); @@ -247,7 +254,7 @@ class Module { Utils::deSerialize(cmd, frame->data(), frame->size()); } - bool queuePlayPauseCommand(PlayPauseCommand ppCmd); + bool queuePlayPauseCommand(PlayPauseCommand ppCmd, bool priority = false); frame_sp makeCommandFrame(size_t size, framemetadata_sp& metadata); frame_sp makeFrame(size_t size, string& pinId); frame_sp makeFrame(size_t size); // use only if 1 output pin is there diff --git a/base/include/MultimediaQueueXform.h b/base/include/MultimediaQueueXform.h index 9ab370ed6..289baadb5 100644 --- a/base/include/MultimediaQueueXform.h +++ b/base/include/MultimediaQueueXform.h @@ -61,11 +61,13 @@ class MultimediaQueueXform : public Module { uint64_t endTimeSaved = 0; uint64_t queryStartTime = 0; uint64_t queryEndTime = 0; + bool direction = true; FrameMetadata::FrameType mFrameType; using sys_clock = std::chrono::system_clock; sys_clock::time_point frame_begin; std::chrono::nanoseconds myTargetFrameLen; std::chrono::nanoseconds myNextWait; uint64_t latestFrameExportedFromHandleCmd = 0; + uint64_t latestFrameExportedFromProcess = 0; bool initDone = false; }; diff --git 
a/base/src/H264Decoder.cpp b/base/src/H264Decoder.cpp index 6c1dbe5b8..badd0c69e 100644 --- a/base/src/H264Decoder.cpp +++ b/base/src/H264Decoder.cpp @@ -12,6 +12,7 @@ #include "Frame.h" #include "Logger.h" #include "Utils.h" +#include "H264Utils.h" class H264Decoder::Detail { @@ -27,35 +28,45 @@ class H264Decoder::Detail bool setMetadata(framemetadata_sp& metadata, frame_sp frame, std::function send, std::function makeFrame) { - if (metadata->getFrameType() == FrameMetadata::FrameType::H264_DATA) + auto type = H264Utils::getNALUType((char*)frame->data()); + if (type == H264Utils::H264_NAL_TYPE_IDR_SLICE || type == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { - sps_pps_properties p; - H264ParserUtils::parse_sps(((const char*)frame->data()) + 5, frame->size() > 5 ? frame->size() - 5 : frame->size(), &p); - mWidth = p.width; - mHeight = p.height; + if (metadata->getFrameType() == FrameMetadata::FrameType::H264_DATA) + { + sps_pps_properties p; + H264ParserUtils::parse_sps(((const char*)frame->data()) + 5, frame->size() > 5 ? 
frame->size() - 5 : frame->size(), &p); + mWidth = p.width; + mHeight = p.height; - auto h264Metadata = framemetadata_sp(new H264Metadata(mWidth, mHeight)); - auto rawOutMetadata = FrameMetadataFactory::downcast(h264Metadata); - rawOutMetadata->setData(*rawOutMetadata); - } + auto h264Metadata = framemetadata_sp(new H264Metadata(mWidth, mHeight)); + auto rawOutMetadata = FrameMetadataFactory::downcast(h264Metadata); + rawOutMetadata->setData(*rawOutMetadata); +#ifdef ARM64 + helper.reset(new h264DecoderV4L2Helper()); + return helper->init(send, makeFrame); +#else + helper.reset(new H264DecoderNvCodecHelper(mWidth, mHeight)); + return helper->init(send, makeFrame); +#endif + } + else + { + throw AIPException(AIP_NOTIMPLEMENTED, "Unknown frame type"); + } + } else { - throw AIPException(AIP_NOTIMPLEMENTED, "Unknown frame type"); + return false; } - -#ifdef ARM64 - helper.reset(new h264DecoderV4L2Helper()); - return helper->init(send, makeFrame); -#else - helper.reset(new H264DecoderNvCodecHelper(mWidth, mHeight)); - return helper->init(send, makeFrame);// -#endif } - void compute(frame_sp& frame) + void compute(void* inputFrameBuffer, size_t inputFrameSize, uint64_t inputFrameTS) { - helper->process(frame); + if(helper != nullptr) + { + helper->process(inputFrameBuffer, inputFrameSize, inputFrameTS); + } } #ifdef ARM64 @@ -139,7 +150,6 @@ bool H264Decoder::init() { return false; } - return true; } @@ -149,38 +159,433 @@ bool H264Decoder::term() auto eosFrame = frame_sp(new EoSFrame()); mDetail->closeAllThreads(eosFrame); #endif - mDetail.reset(); return Module::term(); } +void* H264Decoder::prependSpsPps(frame_sp& iFrame, size_t& spsPpsFrameSize) +{ + spsPpsFrameSize = iFrame->size() + spsBuffer.size() + ppsBuffer.size() + 8; + uint8_t* spsPpsFrameBuffer = new uint8_t[spsPpsFrameSize]; + char NaluSeprator[4] = { 00 ,00, 00 ,01 }; + auto nalu = reinterpret_cast(NaluSeprator); + memcpy(spsPpsFrameBuffer, nalu, 4); + spsPpsFrameBuffer += 4; + memcpy(spsPpsFrameBuffer, 
spsBuffer.data(), spsBuffer.size()); + spsPpsFrameBuffer += spsBuffer.size(); + memcpy(spsPpsFrameBuffer, nalu, 4); + spsPpsFrameBuffer += 4; + memcpy(spsPpsFrameBuffer, ppsBuffer.data(), ppsBuffer.size()); + spsPpsFrameBuffer += ppsBuffer.size(); + memcpy(spsPpsFrameBuffer, iFrame->data(), iFrame->size()); + spsPpsFrameBuffer = spsPpsFrameBuffer - spsBuffer.size() - ppsBuffer.size() - 8; + return spsPpsFrameBuffer; +} + +void H264Decoder::clearIncompleteBwdGopTsFromIncomingTSQ(std::deque& latestGop) +{ + while (!latestGop.empty() && !incomingFramesTSQ.empty()) + { + auto deleteItr = std::find(incomingFramesTSQ.begin(), incomingFramesTSQ.end(), latestGop.front()->timestamp); + if (deleteItr != incomingFramesTSQ.end()) + { + incomingFramesTSQ.erase(deleteItr); + latestGop.pop_front(); + } + } +} + +void H264Decoder::bufferBackwardEncodedFrames(frame_sp& frame, short naluType) +{ + if (dirChangedToBwd) + { + latestBackwardGop.clear(); + dirChangedToBwd = false; + } + // insert frames into the latest gop until I frame comes. + latestBackwardGop.emplace_back(frame); + // The latest GOP is complete when I Frame comes up, move the GOP to backwardGopBuffer where all the backward GOP's are buffered + if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + foundIFrameOfReverseGop = true; + backwardGopBuffer.push_back(std::move(latestBackwardGop)); + } +} + +void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short naluType) +{ + if (dirChangedToFwd) + { + // Whenever the direction changes to forward we just send all the backward buffered GOP's to decoded in a single step . The motive is to send the current forward frame to decoder in the same step. 
+ while (!backwardGopBuffer.empty()) + { + decodeFrameFromBwdGOP(); + } + + // Whenever direction changes to forward , And the latestBackwardGop is incomplete , then delete the latest backward GOP and remove the frames from incomingFramesTSQ entry as well + if (!latestBackwardGop.empty()) + { + clearIncompleteBwdGopTsFromIncomingTSQ(latestBackwardGop); + } + dirChangedToFwd = false; + } + if(prevFrameInCache) + { + // previous Frame was In Cache & current is not + if (!latestForwardGop.empty()) + { + short naluTypeOfForwardGopFirstFrame = H264Utils::getNALUType((char*)latestForwardGop.front()->data()); + if (naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + // Corner case: Forward :- current frame is not part of latestForwardGOP + if (latestForwardGop.front()->timestamp > frame->timestamp) + { + latestForwardGop.clear(); + } + } + + // Corner case: Forward:- When end of cache hits while in the middle of gop, before decoding the next P frame we need decode the previous frames of that GOP. 
+ // There might be a case where we might have cleared the decoder, in order to start the decoder again we must prepend sps and pps to I frame if not present + if (!latestForwardGop.empty() && naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) + { + auto iFrame = latestForwardGop.front(); + size_t spsPpsFrameSize; + auto spsPpsFrameBuffer = prependSpsPps(iFrame, spsPpsFrameSize); + mDetail->compute(spsPpsFrameBuffer, spsPpsFrameSize, iFrame->timestamp); + latestForwardGop.pop_front(); + for (auto itr = latestForwardGop.begin(); itr != latestForwardGop.end(); itr++) + { + if (itr->get()->timestamp < frame->timestamp) + { + mDetail->compute(itr->get()->data(), itr->get()->size(), itr->get()->timestamp); + } + } + } + else if (!latestForwardGop.empty() && naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + for (auto itr = latestForwardGop.begin(); itr != latestForwardGop.end(); itr++) + { + if (itr->get()->timestamp < frame->timestamp) + { + mDetail->compute(itr->get()->data(), itr->get()->size(), itr->get()->timestamp); + } + } + } + } + } + prevFrameInCache = false; + + /* buffer fwd GOP and send the current frame */ + // new GOP starts + if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + latestForwardGop.clear(); + } + latestForwardGop.emplace_back(frame); + + // If direction changed to forward in the middle of GOP (Even the latest gop of backward was half and not decoded) , Then we drop the P frames until next I frame. + // We also remove the entries of P frames from the incomingFramesTSQ. 
+ short latestForwardGopFirstFrameNaluType = H264Utils::getNALUType((char*)latestForwardGop.begin()->get()->data()); + if (latestForwardGopFirstFrameNaluType != H264Utils::H264_NAL_TYPE_IDR_SLICE && latestForwardGopFirstFrameNaluType != H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + clearIncompleteBwdGopTsFromIncomingTSQ(latestForwardGop); + return; + } + + mDetail->compute(frame->data(), frame->size(), frame->timestamp); + return; +} + +void H264Decoder::decodeFrameFromBwdGOP() +{ + if (!backwardGopBuffer.empty() && H264Utils::getNALUType((char*)backwardGopBuffer.front().back()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE && prevFrameInCache) + { + auto iFrame = backwardGopBuffer.front().back(); + size_t spsPpsFrameSize; + auto spsPpsFrameBuffer = prependSpsPps(iFrame, spsPpsFrameSize); + mDetail->compute(spsPpsFrameBuffer, spsPpsFrameSize, iFrame->timestamp); + backwardGopBuffer.front().pop_back(); + prevFrameInCache = false; + } + if (!backwardGopBuffer.empty() && !backwardGopBuffer.front().empty()) + { + // For reverse play we sent the frames to the decoder in reverse, As the last frame added in the deque should be sent first (Example : P,P,P,P,P,P,I) + auto itr = backwardGopBuffer.front().rbegin(); + mDetail->compute(itr->get()->data(), itr->get()->size(), itr->get()->timestamp); + backwardGopBuffer.front().pop_back(); + } + if (backwardGopBuffer.size() >= 1 && backwardGopBuffer.front().empty()) + { + backwardGopBuffer.pop_front(); + } + if (backwardGopBuffer.empty()) + { + foundIFrameOfReverseGop = false; + } +} + +void H264Decoder::saveSpsPps(frame_sp frame) +{ + auto mFrameBuffer = const_buffer(frame->data(), frame->size()); + auto ret = H264Utils::parseNalu(mFrameBuffer); + const_buffer tempSpsBuffer; + const_buffer tempPpsBuffer; + short typeFound; + tie(typeFound, tempSpsBuffer, tempPpsBuffer) = ret; + + if ((tempSpsBuffer.size() != 0) || (tempPpsBuffer.size() != 0)) + { + mHeaderFrame = frame; + spsBuffer = tempSpsBuffer; + ppsBuffer = tempPpsBuffer; + } 
+} + bool H264Decoder::process(frame_container& frames) { - auto frame = frames.cbegin()->second; - mDetail->compute(frame); + auto frame = frames.begin()->second; + auto frameMetadata = frame->getMetadata(); + auto h264Metadata = FrameMetadataFactory::downcast(frameMetadata); + + if (mDirection && !h264Metadata->direction) + { + dirChangedToBwd = true; + } + else if (!mDirection && h264Metadata->direction) + { + dirChangedToFwd = true; //rename to directionChangedToFwd + } + else + { + dirChangedToBwd = false; + dirChangedToFwd = false; + } + + /* Clear the latest forward gop whenever seek happens bcz there is no buffering for fwd play. + We dont clear backwardGOP because there might be a left over GOP to be decoded. */ + if (h264Metadata->mp4Seek) + { + latestForwardGop.clear(); + } + + mDirection = h264Metadata->direction; + short naluType = H264Utils::getNALUType((char*)frame->data()); + if (naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + saveSpsPps(frame); + } + // we get a repeated frame whenever direction changes i.e. the timestamp Q latest frame is repeated + if (!incomingFramesTSQ.empty() && incomingFramesTSQ.back() == frame->timestamp) + { + flushDecoderFlag = true; + } + + //Insert the frames time stamp in TS queue. We send the frames to next modules in the same order. + incomingFramesTSQ.push_back(frame->timestamp); + + //If the frame is already present in the decoded output cache then skip the frame decoding. 
+ if (decodedFramesCache.find(frame->timestamp) != decodedFramesCache.end()) + { + //prepend sps and pps if 1st frame is I frame + if (!backwardGopBuffer.empty() && H264Utils::getNALUType((char*)backwardGopBuffer.front().back()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE) + { + auto iFrame = backwardGopBuffer.front().back(); + size_t spsPpsFrameSize; + auto spsPpsFrameBuffer = prependSpsPps(iFrame, spsPpsFrameSize); + mDetail->compute(spsPpsFrameBuffer, spsPpsFrameSize, iFrame->timestamp); + backwardGopBuffer.front().pop_back(); + } + // the buffered GOPs in bwdGOPBuffer needs to need to be processed first + while (!backwardGopBuffer.empty()) + { + decodeFrameFromBwdGOP(); + } + + // if we seeked + if (h264Metadata->mp4Seek) + { + // flush the incomplete GOP + flushDecoderFlag = true; + clearIncompleteBwdGopTsFromIncomingTSQ(latestBackwardGop); + } + + // corner case: partial GOP already present in cache + if (!mDirection && latestBackwardGop.empty() && backwardGopBuffer.empty()) + { + auto eosFrame = frame_sp(new EmptyFrame()); + mDetail->compute(eosFrame->data(), eosFrame->size(), eosFrame->timestamp); + flushDecoderFlag = false; + } + + if (!latestBackwardGop.empty()) + { + // Corner case: backward :- (I,P,P,P) Here if first two frames are in the cache and last two frames are not in the cache , to decode the last two frames we buffer the full gop and later decode it. 
+ bufferBackwardEncodedFrames(frame, naluType); + sendDecodedFrame(); + return true; + } + + if (mDirection && ((naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) || (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE))) + { + latestForwardGop.clear(); + latestForwardGop.push_back(frame); + } + // dont buffer fwd GOP if I frame has not been recieved (possible in intra GOP direction change cases) + else if (mDirection && !latestForwardGop.empty() && (H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_SEQ_PARAM || H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE)) + { + flushDecoderFlag = false; + latestForwardGop.push_back(frame); + } + + // While in forward play, if cache has resumed in the middle of the GOP then to get the previous few frames we need to flush the decoder. + if (mDirection && !prevFrameInCache) + { + auto eosFrame = frame_sp(new EmptyFrame()); + mDetail->compute(eosFrame->data(), eosFrame->size(), eosFrame->timestamp); + flushDecoderFlag = false; + } + prevFrameInCache = true; + sendDecodedFrame(); + return true; + } + /* If frame is not in output cache, it needs to be buffered & decoded */ + if (mDirection) + { + //Buffers the latest GOP and send the current frame to decoder. 
+ bufferAndDecodeForwardEncodedFrames(frame, naluType); + } + else + { + //Only buffering of backward GOP happens + bufferBackwardEncodedFrames(frame, naluType); + } + if (foundIFrameOfReverseGop) + { + // The I frame of backward GOP was found , now we send the frames to the decoder one by one in every step + decodeFrameFromBwdGOP(); + } + sendDecodedFrame(); + dropFarthestFromCurrentTs(frame->timestamp); return true; } +void H264Decoder::sendDecodedFrame() +{ + // not in output cache && flushdecoder flag is set + if (!incomingFramesTSQ.empty() && !decodedFramesCache.empty() && decodedFramesCache.find(incomingFramesTSQ.front()) == decodedFramesCache.end() && flushDecoderFlag && backwardGopBuffer.empty()) + { + // We send empty frame to the decoder , in order to flush out all the frames from decoder. + // This is to handle some cases whenever the direction change happens and to get out the latest few frames sent to decoder. + auto eosFrame = frame_sp(new EmptyFrame()); + mDetail->compute(eosFrame->data(), eosFrame->size(), eosFrame->timestamp); + flushDecoderFlag = false; + } + + // timestamp in output cache + if (!incomingFramesTSQ.empty() && !decodedFramesCache.empty() && decodedFramesCache.find(incomingFramesTSQ.front()) != decodedFramesCache.end()) + { + auto outFrame = decodedFramesCache[incomingFramesTSQ.front()]; + incomingFramesTSQ.pop_front(); + frame_container frames; + frames.insert(make_pair(mOutputPinId, outFrame)); + send(frames); + } +} + +void H264Decoder::bufferDecodedFrames(frame_sp& frame) +{ + decodedFramesCache.insert({ frame->timestamp, frame }); +} + +void H264Decoder::dropFarthestFromCurrentTs(uint64_t ts) +{ + if (decodedFramesCache.empty()) + { + return; + } + + /* dropping algo */ + int64_t begDistTS = ts - decodedFramesCache.begin()->first; + auto absBeginDistance = abs(begDistTS); + int64_t endDistTS = ts - decodedFramesCache.rbegin()->first; + auto absEndDistance = abs(endDistTS); + if (decodedFramesCache.size() >= 
mProps.upperWaterMark) + { + if (absEndDistance <= absBeginDistance) + { + auto itr = decodedFramesCache.begin(); + while (itr != decodedFramesCache.end()) + { + if (decodedFramesCache.size() >= mProps.lowerWaterMark) + { + boost::mutex::scoped_lock(m_mutex); + // Note - erase returns the iterator of next element after deletion. + // Dont drop the frames from cache which are present in the incomingFramesTSQ + if (std::find(incomingFramesTSQ.begin(), incomingFramesTSQ.end(), itr->first) != incomingFramesTSQ.end()) + { + itr++; + continue; + } + itr = decodedFramesCache.erase(itr); + } + else + { + return; + } + } + } + else + { + // delete from end using the fwd iterator. + auto itr = decodedFramesCache.end(); + --itr; + while (itr != decodedFramesCache.begin()) + { + if (decodedFramesCache.size() >= mProps.lowerWaterMark) + { + boost::mutex::scoped_lock(m_mutex); + // Note - erase returns the iterator of next element after deletion. + if (std::find(incomingFramesTSQ.begin(), incomingFramesTSQ.end(), itr->first) != incomingFramesTSQ.end()) + { + --itr; + continue; + } + itr = decodedFramesCache.erase(itr); + --itr; + } + else + { + return; + } + } + } + } +} + bool H264Decoder::processSOS(frame_sp& frame) { auto metadata = frame->getMetadata(); - mDetail->setMetadata(metadata, frame, + auto h264Metadata = FrameMetadataFactory::downcast(metadata); + mDirection = h264Metadata->direction; + auto ret = mDetail->setMetadata(metadata, frame, [&](frame_sp& outputFrame) { - frame_container frames; - frames.insert(make_pair(mOutputPinId, outputFrame)); - send(frames); + bufferDecodedFrames(outputFrame); }, [&]() -> frame_sp {return makeFrame(); } ); - mShouldTriggerSOS = false; - auto rawOutMetadata = FrameMetadataFactory::downcast(mOutputMetadata); + if (ret) + { + mShouldTriggerSOS = false; + auto rawOutMetadata = FrameMetadataFactory::downcast(mOutputMetadata); #ifdef ARM64 - RawImagePlanarMetadata OutputMetadata(mDetail->mWidth, mDetail->mHeight, 
ImageMetadata::ImageType::NV12, 128, CV_8U, FrameMetadata::MemType::DMABUF); + RawImagePlanarMetadata OutputMetadata(mDetail->mWidth, mDetail->mHeight, ImageMetadata::ImageType::NV12, 128, CV_8U, FrameMetadata::MemType::DMABUF); #else - RawImagePlanarMetadata OutputMetadata(mDetail->mWidth, mDetail->mHeight, ImageMetadata::YUV420, size_t(0), CV_8U, FrameMetadata::HOST); + RawImagePlanarMetadata OutputMetadata(mDetail->mWidth, mDetail->mHeight, ImageMetadata::YUV420, size_t(0), CV_8U, FrameMetadata::HOST); #endif - rawOutMetadata->setData(OutputMetadata); + rawOutMetadata->setData(OutputMetadata); + } + return true; } @@ -192,7 +597,24 @@ bool H264Decoder::shouldTriggerSOS() bool H264Decoder::processEOS(string& pinId) { auto frame = frame_sp(new EmptyFrame()); - mDetail->compute(frame); - mShouldTriggerSOS = true; + mDetail->compute(frame->data(), frame->size(), frame->timestamp); + LOG_ERROR << "processes sos " ; + //mShouldTriggerSOS = true; return true; +} + +void H264Decoder::flushQue() +{ + if (!incomingFramesTSQ.empty()) + { + LOG_ERROR << "clearing decoder cache and clear ts = " << incomingFramesTSQ.size(); + incomingFramesTSQ.clear(); + latestBackwardGop.clear(); + latestForwardGop.clear(); + backwardGopBuffer.clear(); + auto frame = frame_sp(new EmptyFrame()); + LOG_ERROR << "does it compute"; + mDetail->compute(frame->data(), frame->size(), frame->timestamp); + LOG_ERROR << " cleared decoder cache " << incomingFramesTSQ.size(); + } } \ No newline at end of file diff --git a/base/src/H264DecoderV4L2Helper.cpp b/base/src/H264DecoderV4L2Helper.cpp index 47cbb74b9..d976464ac 100644 --- a/base/src/H264DecoderV4L2Helper.cpp +++ b/base/src/H264DecoderV4L2Helper.cpp @@ -282,10 +282,10 @@ Buffer::fill_buffer_plane_format(uint32_t *num_planes, return 0; } -void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer * buffer) +void h264DecoderV4L2Helper::read_input_chunk_frame_sp(void* inputFrameBuffer, size_t inputFrameSize, Buffer * buffer) { - 
memcpy(buffer->planes[0].data,inpFrame->data(),inpFrame->size()); - buffer->planes[0].bytesused = static_cast(inpFrame->size()); + memcpy(buffer->planes[0].data,inputFrameBuffer,inputFrameSize); + buffer->planes[0].bytesused = static_cast(inputFrameSize); } /** @@ -303,7 +303,7 @@ void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer * memory-mapped virtual address of the plane with the access * pointed by the flag into the void data-pointer. * Before the mapped memory is accessed, a call to NvBufferMemSyncForCpu() -* with the virtual address returned must be present before any access is made + * with the virtual address returned must be present before any access is made * by the CPU to the buffer. * * After reading the data, the memory-mapped virtual address of the @@ -315,8 +315,9 @@ void h264DecoderV4L2Helper::read_input_chunk_frame_sp(frame_sp inpFrame, Buffer { return -1; } - outputFrame->timestamp = incomingTimeStamp.front(); - incomingTimeStamp.pop(); + outputFrame->timestamp = framesTimestampEntry.front(); + framesTimestampEntry.pop(); + send(outputFrame); return 0; @@ -638,8 +639,6 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) ** Format and buffers are now set on capture. */ - - if (!ctx->in_error) { m_nThread->query_set_capture(ctx); @@ -727,7 +726,9 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) /* Blocklinear to Pitch transformation is required ** to dump the raw decoded buffer data. 
*/ + auto outputFrame = m_nThread->makeFrame(); + auto dmaOutFrame = static_cast(outputFrame->data()); int f_d = dmaOutFrame->getFd(); ret_val = NvBufferTransform(decoded_buffer->planes[0].fd,f_d, &transform_params); @@ -783,7 +784,7 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) return NULL; } - bool h264DecoderV4L2Helper::decode_process(context_t& ctx, frame_sp frame) + bool h264DecoderV4L2Helper::decode_process(context_t& ctx, void* inputFrameBuffer, size_t inputFrameSize) { bool allow_DQ = true; int ret_val; @@ -823,7 +824,7 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) if (ctx.decode_pixfmt == V4L2_PIX_FMT_H264) { - read_input_chunk_frame_sp(frame, buffer); + read_input_chunk_frame_sp(inputFrameBuffer, inputFrameSize, buffer); } else { @@ -1132,6 +1133,10 @@ bool h264DecoderV4L2Helper::init(std::function _send, std::func makeFrame = _makeFrame; mBuffer.reset(new Buffer()); send = _send; + return initializeDecoder(); +} +bool h264DecoderV4L2Helper::initializeDecoder() +{ int flags = 0; struct v4l2_capability caps; struct v4l2_buffer op_v4l2_buf; @@ -1303,11 +1308,22 @@ bool h264DecoderV4L2Helper::init(std::function _send, std::func typedef void * (*THREADFUNCPTR)(void *); pthread_create(&ctx.dec_capture_thread, NULL,h264DecoderV4L2Helper::capture_thread, (void *) (this)); + + return true; } -int h264DecoderV4L2Helper::process(frame_sp inputFrame) +int h264DecoderV4L2Helper::process(void* inputFrameBuffer, size_t inputFrameSize, uint64_t inputFrameTS) { uint32_t idx = 0; - incomingTimeStamp.push(inputFrame->timestamp); + if(inputFrameSize) + framesTimestampEntry.push(inputFrameTS); + + if(inputFrameSize && ctx.eos && ctx.got_eos) + { + ctx.eos = false; + ctx.got_eos = false; + initializeDecoder(); + } + while (!ctx.eos && !ctx.in_error && idx < ctx.op_num_buffers) { struct v4l2_buffer queue_v4l2_buf_op; @@ -1320,7 +1336,7 @@ int h264DecoderV4L2Helper::process(frame_sp inputFrame) buffer = ctx.op_buffers[idx]; if (ctx.decode_pixfmt == 
V4L2_PIX_FMT_H264) { - read_input_chunk_frame_sp(inputFrame, buffer); + read_input_chunk_frame_sp(inputFrameBuffer, inputFrameSize, buffer); } else { @@ -1353,7 +1369,7 @@ int h264DecoderV4L2Helper::process(frame_sp inputFrame) } // Dequeue and queue loop on output plane. - ctx.eos = decode_process(ctx,inputFrame); + ctx.eos = decode_process(ctx,inputFrameBuffer, inputFrameSize); /* For blocking mode, after getting EOS on output plane, ** dequeue all the queued buffers on output plane. @@ -1389,7 +1405,7 @@ int h264DecoderV4L2Helper::process(frame_sp inputFrame) } void h264DecoderV4L2Helper::closeAllThreads(frame_sp eosFrame) { - process(eosFrame); + process(eosFrame->data(), eosFrame->size(), 0); if (ctx.fd != -1) { if (ctx.dec_capture_thread) diff --git a/base/src/H264DecoderV4L2Helper.h b/base/src/H264DecoderV4L2Helper.h index b7abd67ab..3a556b06b 100644 --- a/base/src/H264DecoderV4L2Helper.h +++ b/base/src/H264DecoderV4L2Helper.h @@ -40,6 +40,7 @@ #include "Frame.h" #include #include +#include /** * @brief Class representing a buffer. @@ -192,7 +193,7 @@ class h264DecoderV4L2Helper * @param[in] stream Input stream * @param[in] buffer Buffer class pointer */ - void read_input_chunk_frame_sp(frame_sp inpFrame, Buffer *buffer); + void read_input_chunk_frame_sp(void* inputFrameBuffer, size_t inputFrameSize, Buffer *buffer); /** * @brief Writes a plane data of the buffer to a file. @@ -228,7 +229,7 @@ class h264DecoderV4L2Helper * * @param[in] ctx Pointer to the decoder context struct created. */ - void query_set_capture(context_t *ctx, int &fd); + void query_set_capture(context_t *ctx); /** * @brief Callback function on capture thread. @@ -257,7 +258,7 @@ class h264DecoderV4L2Helper * EOS is detected by the decoder and all the buffers are dequeued; * else the decode process continues running. */ - bool decode_process(context_t &ctx, frame_sp frame); + bool decode_process(context_t &ctx, void* inputFrameBuffer, size_t inputFrameSize); /** * @brief Dequeues an event. 
@@ -381,10 +382,12 @@ class h264DecoderV4L2Helper */ int subscribe_event(int fd, uint32_t type, uint32_t id, uint32_t flags); - int process(frame_sp inputFrame); + int process(void* inputFrameBuffer, size_t inputFrameSize, uint64_t inputFrameTS); bool init(std::function send, std::function makeFrame); + bool initializeDecoder(); + void closeAllThreads(frame_sp eosFrame); protected: boost::shared_ptr mBuffer; @@ -392,5 +395,5 @@ class h264DecoderV4L2Helper std::function makeFrame; std::function send; int ret = 0; - std::queue incomingTimeStamp; + std::queue framesTimestampEntry; }; diff --git a/base/src/Module.cpp b/base/src/Module.cpp index 86a5b9b0b..c8cab7e51 100644 --- a/base/src/Module.cpp +++ b/base/src/Module.cpp @@ -1037,7 +1037,7 @@ bool Module::shouldTriggerSOS() return false; } -bool Module::queuePlayPauseCommand(PlayPauseCommand ppCmd) +bool Module::queuePlayPauseCommand(PlayPauseCommand ppCmd, bool priority) { auto metadata = framemetadata_sp(new PausePlayMetadata()); auto frame = makeCommandFrame(ppCmd.getSerializeSize(), metadata); @@ -1046,10 +1046,17 @@ bool Module::queuePlayPauseCommand(PlayPauseCommand ppCmd) // add to que frame_container frames; frames.insert(make_pair("pause_play", frame)); - if (!Module::try_push(frames)) + if (!priority) { - LOG_ERROR << "failed to push play command to the que"; - return false; + if (!Module::try_push(frames)) + { + LOG_ERROR << "failed to push play command to the que"; + return false; + } + } + else + { + Module::push_back(frames); } return true; } diff --git a/base/src/Mp4ReaderSource.cpp b/base/src/Mp4ReaderSource.cpp index efab93ad9..5052999e9 100644 --- a/base/src/Mp4ReaderSource.cpp +++ b/base/src/Mp4ReaderSource.cpp @@ -82,6 +82,7 @@ class Mp4ReaderDetailAbs mState.direction = props.direction; mState.mVideoPath = videoPath; mProps = props; + mState.end = false; } void setProps(Mp4ReaderSourceProps& props) @@ -256,6 +257,8 @@ class Mp4ReaderDetailAbs } LOG_TRACE << "changed direction frameIdx <" << 
mState.mFrameCounterIdx << "> totalFrames <" << mState.mFramesInVideo << ">"; mp4_demux_toggle_playback(mState.demux, mState.video.id); + mDirection = _direction; + setMetadata(); } } @@ -371,6 +374,7 @@ class Mp4ReaderDetailAbs } // no files left to read OR no new files even after fresh parse OR empty folder + if (mState.end) { LOG_INFO << "Reached EOF end state in playback."; @@ -380,6 +384,7 @@ class Mp4ReaderDetailAbs mState.end = false; return true; } + // reload the current file if (waitFlag) { @@ -491,6 +496,7 @@ class Mp4ReaderDetailAbs mState.mFramesInVideo = mState.info.sample_count; mWidth = mState.info.video_width; mHeight = mState.info.video_height; + mDirection = mState.direction; mDurationInSecs = mState.info.duration / mState.info.timescale; mFPS = mState.mFramesInVideo / mDurationInSecs; } @@ -612,6 +618,8 @@ class Mp4ReaderDetailAbs // reset flags waitFlag = false; sentEOSSignal = false; + isMp4SeekFrame = true; + setMetadata(); return true; } @@ -623,6 +631,31 @@ class Mp4ReaderDetailAbs */ std::string skipVideoFile; uint64_t skipMsecsInFile; + if (!isVideoFileFound) + { + if (!cof->probe(boost::filesystem::path(mState.mVideoPath), mState.mVideoPath)) + { + return false; + } + isVideoFileFound = true; + } + if (mProps.parseFS) + { + auto boostVideoTS = boost::filesystem::path(mState.mVideoPath).stem().string(); + uint64_t start_parsing_ts = 0; + try + { + start_parsing_ts = std::stoull(boostVideoTS); + } + catch (std::invalid_argument) + { + auto msg = "Video File name not in proper format.Check the filename sent as props. 
\ + If you want to read a file with custom name instead, please disable parseFS flag."; + LOG_ERROR << msg; + throw AIPException(AIP_FATAL, msg); + } + cof->parseFiles(start_parsing_ts, mState.direction, true, false); // enable exactMatch, dont disable disableBatchSizeCheck + } bool ret = cof->getRandomSeekFile(skipTS, mState.direction, skipMsecsInFile, skipVideoFile); if (!ret) { @@ -673,6 +706,9 @@ class Mp4ReaderDetailAbs waitFlag = false; // prependSpsPps mState.shouldPrependSpsPps = true; + isMp4SeekFrame = true; + setMetadata(); + LOG_ERROR << "seek successfull"; return true; } @@ -773,7 +809,7 @@ class Mp4ReaderDetailAbs currentTS = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()).count(); if (currentTS >= recheckDiskTS) { - if (!cof->probe(boost::filesystem::path(mState.mVideoPath), mState.mVideoPath)); + if (!cof->probe(boost::filesystem::path(mState.mVideoPath), mState.mVideoPath)) { imgFrame = nullptr; imageFrameSize = 0; @@ -976,11 +1012,11 @@ class Mp4ReaderDetailAbs std::string mVideoPath = ""; int32_t mFrameCounterIdx; bool shouldPrependSpsPps = false; + bool foundFirstReverseIFrame = false; bool end = false; Mp4ReaderSourceProps props; float speed; bool direction; - //bool end; } mState; uint64_t openVideoStartingTS = 0; uint64_t reloadFileAfter = 0; @@ -993,6 +1029,7 @@ class Mp4ReaderDetailAbs uint64_t recheckDiskTS = 0; boost::shared_ptr cof; framemetadata_sp updatedEncodedImgMetadata; + framemetadata_sp mH264Metadata; /* mState.end = true is possible only in two cases: - if parseFS found no more relevant files on the disk @@ -1001,6 +1038,8 @@ class Mp4ReaderDetailAbs public: int mWidth = 0; int mHeight = 0; + bool mDirection; + bool isMp4SeekFrame = false; int ret; double mFPS = 0; double mDurationInSecs = 0; @@ -1057,12 +1096,6 @@ void Mp4ReaderDetailJpeg::setMetadata() } auto encodedMetadata = FrameMetadataFactory::downcast(metadata); encodedMetadata->setData(*encodedMetadata); - - auto mp4FrameMetadata = 
framemetadata_sp(new Mp4VideoMetadata("v_1_0")); - // set proto version in mp4videometadata - auto serFormatVersion = getSerFormatVersion(); - auto mp4VideoMetadata = FrameMetadataFactory::downcast(mp4FrameMetadata); - mp4VideoMetadata->setData(serFormatVersion); Mp4ReaderDetailAbs::setMetadata(); // set at Module level mSetMetadata(encodedImagePinId, metadata); @@ -1141,17 +1174,21 @@ bool Mp4ReaderDetailJpeg::produceFrames(frame_container& frames) void Mp4ReaderDetailH264::setMetadata() { - auto metadata = framemetadata_sp(new H264Metadata(mWidth, mHeight)); - if (!metadata->isSet()) + mH264Metadata = framemetadata_sp(new H264Metadata(mWidth, mHeight)); + + if (!mH264Metadata->isSet()) { return; } - auto h264Metadata = FrameMetadataFactory::downcast(metadata); + auto h264Metadata = FrameMetadataFactory::downcast(mH264Metadata); + h264Metadata->direction = mDirection; + h264Metadata->mp4Seek = isMp4SeekFrame; h264Metadata->setData(*h264Metadata); readSPSPPS(); + Mp4ReaderDetailAbs::setMetadata(); - mSetMetadata(h264ImagePinId, metadata); + mSetMetadata(h264ImagePinId, mH264Metadata); return; } @@ -1231,7 +1268,7 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) return true; } - if (mState.shouldPrependSpsPps) + if (mState.shouldPrependSpsPps || (!mState.direction && !mState.foundFirstReverseIFrame)) { boost::asio::mutable_buffer tmpBuffer(imgFrame->data(), imgFrame->size()); auto type = H264Utils::getNALUType((char*)tmpBuffer.data()); @@ -1244,11 +1281,13 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) memcpy(tempFrameBuffer, imgFrame->data(), imgSize); imgSize += spsSize + ppsSize + 8; imgFrame = tempFrame; + mState.foundFirstReverseIFrame = true; mState.shouldPrependSpsPps = false; } - else if(type == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + else if (type == H264Utils::H264_NAL_TYPE_SEQ_PARAM) { mState.shouldPrependSpsPps = false; + mState.foundFirstReverseIFrame = true; } } @@ -1260,7 +1299,6 @@ bool 
Mp4ReaderDetailH264::produceFrames(frame_container& frames) { frameData[3] = 0x1; frameData[spsSize + 7] = 0x1; - frameData[spsSize + ppsSize + 8] = 0x0; frameData[spsSize + ppsSize + 9] = 0x0; frameData[spsSize + ppsSize + 10] = 0x0; frameData[spsSize + ppsSize + 11] = 0x1; @@ -1307,6 +1345,11 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) } frames.insert(make_pair(metadataFramePinId, trimmedMetadataFrame)); } + if (isMp4SeekFrame) + { + isMp4SeekFrame = false; + setMetadata(); + } return true; } @@ -1319,6 +1362,7 @@ Mp4ReaderSource::~Mp4ReaderSource() {} bool Mp4ReaderSource::init() { + LOG_ERROR<<"MP4READER INIT!!!!!!"; if (!Module::init()) { return false; @@ -1355,6 +1399,7 @@ bool Mp4ReaderSource::init() mDetail->encodedImagePinId = encodedImagePinId; mDetail->h264ImagePinId = h264ImagePinId; mDetail->metadataFramePinId = metadataFramePinId; + LOG_ERROR<<"MP4READER INIT ENNND!!!!!!"; return mDetail->Init(); } @@ -1510,7 +1555,9 @@ bool Mp4ReaderSource::handleCommand(Command::CommandType type, frame_sp& frame) { Mp4SeekCommand seekCmd; getCommand(seekCmd, frame); + LOG_ERROR<<"seek play 1 "; return mDetail->randomSeek(seekCmd.seekStartTS, seekCmd.forceReopen); + LOG_ERROR<<"seek play 2 "; } else { @@ -1520,8 +1567,10 @@ bool Mp4ReaderSource::handleCommand(Command::CommandType type, frame_sp& frame) bool Mp4ReaderSource::handlePausePlay(float speed, bool direction) { + LOG_ERROR<<"hanlde play 1 "; mDetail->setPlayback(speed, direction); return Module::handlePausePlay(speed, direction); + LOG_ERROR<<"hanlde play 2 "; } bool Mp4ReaderSource::randomSeek(uint64_t skipTS, bool forceReopen) diff --git a/base/src/Mp4WriterSink.cpp b/base/src/Mp4WriterSink.cpp index ab2aadd48..277032ea9 100644 --- a/base/src/Mp4WriterSink.cpp +++ b/base/src/Mp4WriterSink.cpp @@ -594,7 +594,7 @@ bool Mp4WriterSink::init() bool Mp4WriterSink::validateInputOutputPins() { - if (getNumberOfInputsByType(FrameMetadata::H264_DATA) != 1 && 
getNumberOfInputsByType(FrameMetadata::ENCODED_IMAGE) != 1) + if (getNumberOfInputsByType(FrameMetadata::H264_DATA) != 1 && getNumberOfInputsByType(FrameMetadata::ENCODED_IMAGE) >= 1) { LOG_ERROR << "<" << getId() << ">::validateInputOutputPins expected 1 pin of ENCODED_IMAGE. Actual<" << getNumberOfInputPins() << ">"; return false; @@ -729,5 +729,5 @@ bool Mp4WriterSink::handlePropsChange(frame_sp& frame) void Mp4WriterSink::setProps(Mp4WriterSinkProps& props) { - Module::addPropsToQueue(props); + Module::addPropsToQueue(props, true); } \ No newline at end of file diff --git a/base/src/Mp4WriterSinkUtils.cpp b/base/src/Mp4WriterSinkUtils.cpp index 2430b31f9..fb7dd964c 100644 --- a/base/src/Mp4WriterSinkUtils.cpp +++ b/base/src/Mp4WriterSinkUtils.cpp @@ -173,6 +173,26 @@ void Mp4WriterSinkUtils::parseTSH264(uint64_t& ts, uint32_t& chunkTimeInMinutes, { syncFlag = false; } + + if (boost::filesystem::extension(baseFolder) == ".mp4") + { + if(currentFolder != baseFolder) + { + if(naluType == H264Utils::H264_NAL_TYPE::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + currentFolder = baseFolder; + } + else + { + return; + } + } + if(currentFolder == baseFolder) + { + customNamedFileDirCheck(baseFolder, chunkTimeInMinutes, relPath, nextFrameFileName); + return; + } + } // used cached values if the difference in ts is less than chunkTime uint32_t chunkTimeInSecs = 60 * chunkTimeInMinutes; if ((t - lastVideoTS) < chunkTimeInSecs && currentFolder == baseFolder)// && chunkTimeInMinutes != UINT32_MAX diff --git a/base/src/MultimediaQueueXform.cpp b/base/src/MultimediaQueueXform.cpp index 7ece1f372..ace947505 100644 --- a/base/src/MultimediaQueueXform.cpp +++ b/base/src/MultimediaQueueXform.cpp @@ -629,27 +629,24 @@ void MultimediaQueueXform::setState(uint64_t tStart, uint64_t tEnd) void MultimediaQueueXform::extractFramesAndEnqueue(boost::shared_ptr& frameQueue) { //loop over frame container - auto frames = frameQueue->pop(); - for (auto itr = 
frames.begin(); itr != frames.end(); itr++) + if (frameQueue->size()) { - if (itr->second->isCommand()) + frame_container framesContainer; + auto frames = frameQueue->pop(); + for (auto itr = frames.begin(); itr != frames.end(); itr++) { - auto cmdType = NoneCommand::getCommandType(itr->second->data(), itr->second->size()); - if(cmdType == Command::CommandType::Relay || cmdType == Command::CommandType::MultimediaQueueXform) + if (itr->second->isCommand()) { + auto cmdType = NoneCommand::getCommandType(itr->second->data(), itr->second->size()); handleCommand(cmdType, itr->second); } else { - frame_container commandFrame; - commandFrame.insert(make_pair(itr->first, itr->second)); - frameQueue->push_back(commandFrame); + framesContainer.insert(make_pair(itr->first, itr->second)); } } - else + if (!framesContainer.empty()) { - frame_container framesContainer; - framesContainer.insert(make_pair(itr->first, itr->second)); mState->queueObject->enqueue(framesContainer, pushToNextModule); } } @@ -664,6 +661,7 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr { myTargetFrameLen = std::chrono::nanoseconds(1000000000 / 22); initDone = false; + LOG_ERROR << "command received"; if (type == Command::CommandType::MultimediaQueueXform) { MultimediaQueueXformCommand cmd; @@ -673,14 +671,29 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr startTimeSaved = cmd.startTime; queryEndTime = cmd.endTime; endTimeSaved = cmd.endTime; - + direction = cmd.direction; + LOG_ERROR << "start time = " << cmd.startTime; + LOG_ERROR << "end time = " << cmd.endTime; + LOG_ERROR << "direction = " << cmd.direction; + LOG_ERROR << "state = " << mState->Type; + LOG_ERROR << "mmq begin ts = " << mState->queueObject->mQueue.begin()->first; bool reset = false; pushToNextModule = true; if (mState->Type == State::EXPORT) { mState->handleExport(queryStartTime, queryEndTime, reset, mState->queueObject->mQueue, endTimeSaved); - for (auto it = 
mState->queueObject->mQueue.begin(); it != mState->queueObject->mQueue.end(); it++) + State::mQueueMap::iterator it; + if (direction) + { + it = mState->queueObject->mQueue.begin(); + } + else + { + it = mState->queueObject->mQueue.end(); + it--; + } + while (!mState->queueObject->mQueue.empty() )//&& it != mState->queueObject->mQueue.end() { if (((it->first) >= queryStartTime) && (((it->first) <= queryEndTime))) { @@ -694,23 +707,15 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr } else { - auto moduleQueue = getQue(); - if(moduleQueue->size()) - { - extractFramesAndEnqueue(moduleQueue); - } if (!initDone) { myNextWait = myTargetFrameLen; frame_begin = sys_clock::now(); initDone = true; } - - //LOG_ERROR << "multimediaQueueSize = " << queueSize; frame_container outFrames; - auto outputId = Module::getOutputPinIdByType(FrameMetadata::RAW_IMAGE_PLANAR); + auto outputId = Module::getOutputPinIdByType(FrameMetadata::RAW_IMAGE_PLANAR); outFrames.insert(make_pair(outputId, it->second.begin()->second)); - //LOG_ERROR<<"sENDING FROM HANDLE COMMAND AT TIME "<< it->first; mState->exportSend(outFrames); latestFrameExportedFromHandleCmd = it->first; std::chrono::nanoseconds frame_len = sys_clock::now() - frame_begin; @@ -720,6 +725,44 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr } myNextWait += myTargetFrameLen; } + if (!((!direction && it == mState->queueObject->mQueue.begin()) || (direction && it == mState->queueObject->mQueue.end()))) + { + //LOG_ERROR << "enque frames"; + auto moduleQueue = getQue(); + extractFramesAndEnqueue(moduleQueue); + } + } + if (direction) + { + it++; + if (it == mState->queueObject->mQueue.end()) + { + break; + } + } + else + { + if (it != mState->queueObject->mQueue.end()) + { + it--; + } + if (it == mState->queueObject->mQueue.end()) + { + if (mState->Type != State::IDLE) + { + NVRCommandExportView cmd; + cmd.startViewTS = latestFrameExportedFromHandleCmd; + 
cmd.stopViewTS = 0; + cmd.direction = direction; + cmd.mp4ReaderExport = true; + controlModule->queueCommand(cmd, true); + LOG_ERROR << "crashing here?" ; + LOG_ERROR << "state = " << mState->Type; + } + mState->Type = State::IDLE; + LOG_ERROR << "first frame of handle command = " << latestFrameExportedFromHandleCmd; + break; + } } } } @@ -744,6 +787,7 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr setState(queryStartTime, queryEndTime); } return true; + LOG_ERROR << "export frames done"; } LOG_ERROR <<"RELAY COMMAND WAS HERE"; return Module::handleCommand(type, frame); @@ -781,7 +825,17 @@ bool MultimediaQueueXform::process(frame_container& frames) { mState->isProcessCall = true; mState->handleExport(queryStartTime, queryEndTime, reset, mState->queueObject->mQueue, endTimeSaved); - for (auto it = mState->queueObject->mQueue.begin(); it != mState->queueObject->mQueue.end(); it++) + State::mQueueMap::iterator it; + if (direction) + { + it = mState->queueObject->mQueue.begin(); + } + else + { + it = mState->queueObject->mQueue.end(); + it--; + } + while (!mState->queueObject->mQueue.empty())//&& it != mState->queueObject->mQueue.end() { if (((it->first) >= (queryStartTime + 1)) && (((it->first) <= (endTimeSaved)))) { @@ -795,11 +849,6 @@ bool MultimediaQueueXform::process(frame_container& frames) } else { - auto moduleQueue = getQue(); - if(moduleQueue->size()) - { - extractFramesAndEnqueue(moduleQueue); - } if (!initDone) { myNextWait = myTargetFrameLen; @@ -813,6 +862,7 @@ bool MultimediaQueueXform::process(frame_container& frames) outFrames.insert(make_pair(outputId, it->second.begin()->second)); //LOG_ERROR<<"sENDING FROM PROCESS AT TIME "<< it->first; mState->exportSend(outFrames); + latestFrameExportedFromProcess = it->first; std::chrono::nanoseconds frame_len = sys_clock::now() - frame_begin; if (myNextWait > frame_len) { @@ -821,8 +871,42 @@ bool MultimediaQueueXform::process(frame_container& frames) } myNextWait += 
myTargetFrameLen; } + if (!(!direction && it == mState->queueObject->mQueue.begin())) + { + auto moduleQueue = getQue(); + extractFramesAndEnqueue(moduleQueue); + } + } + if (direction) + { + it++; + if (it == mState->queueObject->mQueue.end()) + { + break; + } + } + else + { + if (it != mState->queueObject->mQueue.end()) + { + it--; + } + if (it == mState->queueObject->mQueue.end()) + { + if (mState->Type != State::IDLE) + { + NVRCommandExportView cmd; + cmd.startViewTS = latestFrameExportedFromProcess; + cmd.stopViewTS = 0; + cmd.direction = direction; + cmd.mp4ReaderExport = true; + controlModule->queueCommand(cmd, true); + } + mState->Type = State::IDLE; + LOG_ERROR << "first frame of process = " << latestFrameExportedFromProcess; + break; + } } - } } diff --git a/base/src/OrderedCacheOfFiles.cpp b/base/src/OrderedCacheOfFiles.cpp index 6296e7699..3beb230ce 100644 --- a/base/src/OrderedCacheOfFiles.cpp +++ b/base/src/OrderedCacheOfFiles.cpp @@ -561,8 +561,6 @@ std::vector OrderedCacheOfFiles::parseAndSortDateDir(co { std::vector dateDir; fs::directory_iterator dateDirIter(rootDir), dateDirEndIter; - LOG_INFO << "parsing files from dir <" << *dateDirIter << ">"; - for (dateDirIter; dateDirIter != dateDirEndIter; ++dateDirIter) { if (fs::is_directory(dateDirIter->path())) From e1a75023a0fc25648a4db160b1352c86a082c703 Mon Sep 17 00:00:00 2001 From: Venkat Date: Fri, 27 Oct 2023 00:15:48 -0700 Subject: [PATCH 11/19] H264DecoderNvCodecHelper changes --- base/src/H264DecoderNvCodecHelper.cpp | 21 ++++++++++++--------- base/src/H264DecoderNvCodecHelper.h | 4 +++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/base/src/H264DecoderNvCodecHelper.cpp b/base/src/H264DecoderNvCodecHelper.cpp index f90af9231..562d6326b 100644 --- a/base/src/H264DecoderNvCodecHelper.cpp +++ b/base/src/H264DecoderNvCodecHelper.cpp @@ -1,4 +1,3 @@ - #pragma once #include #include @@ -711,7 +710,7 @@ bool H264DecoderNvCodecHelper::init(std::function _send, std::f { makeFrame = 
_makeFrame; send = _send; - return false; + return true; } void H264DecoderNvCodecHelper::ConvertToPlanar(uint8_t* pHostFrame, int nWidth, int nHeight, int nBitDepth) { @@ -731,15 +730,17 @@ void H264DecoderNvCodecHelper::ConvertToPlanar(uint8_t* pHostFrame, int nWidth, } } -void H264DecoderNvCodecHelper::process(frame_sp& frame) +void H264DecoderNvCodecHelper::process(void* inputFrameBuffer, size_t inputFrameSize, uint64_t inputFrameTS) { + if(inputFrameSize) + framesTimestampEntry.push(inputFrameTS); uint8_t* inputBuffer = NULL; int inputBufferSize = 0; - frame_sp outputFrame = makeFrame(); - uint8_t** outBuffer = reinterpret_cast(outputFrame->data()); + frame_sp outputFrame; + uint8_t** outBuffer; - inputBuffer = static_cast(frame->data()); - inputBufferSize = frame->size(); + inputBuffer = static_cast(inputFrameBuffer); + inputBufferSize = inputFrameSize; int nFrameReturned = 0, nFrame = 0; bool bOutPlanar = true; @@ -749,10 +750,12 @@ void H264DecoderNvCodecHelper::process(frame_sp& frame) for (int i = 0; i < nFrameReturned; i++) { ConvertToPlanar(outBuffer[i], helper->GetWidth(), helper->GetHeight(), helper->GetBitDepth()); - + outputFrame = makeFrame(); + outputFrame->timestamp = framesTimestampEntry.front(); + framesTimestampEntry.pop(); memcpy(outputFrame->data(), outBuffer[i], outputFrame->size()); send(outputFrame); } return; -} +} \ No newline at end of file diff --git a/base/src/H264DecoderNvCodecHelper.h b/base/src/H264DecoderNvCodecHelper.h index f7e117d5d..4ed30803e 100644 --- a/base/src/H264DecoderNvCodecHelper.h +++ b/base/src/H264DecoderNvCodecHelper.h @@ -12,6 +12,7 @@ #include #include "CommonDefs.h" #include "CudaCommon.h" +#include /** * @brief Exception class for error reporting from the decode API. 
@@ -237,9 +238,10 @@ class H264DecoderNvCodecHelper : public NvDecoder bool init(std::function send, std::function makeFrame); void ConvertToPlanar(uint8_t* pHostFrame, int nWidth, int nHeight, int nBitDepth); - void process(frame_sp& frame); + void process(void* inputFrameBuffer, size_t inputFrameSize, uint64_t inputFrameTS); std::function send; std::function makeFrame; private: boost::shared_ptr helper; + std::queue framesTimestampEntry; }; \ No newline at end of file From f35abe98bb12a3cacf84405796fa16c578f6e9f2 Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Tue, 7 Nov 2023 18:47:40 +0530 Subject: [PATCH 12/19] aprapipes sprint 6 changes --- base/include/ApraNvEglRenderer.h | 16 ++- base/include/Command.h | 27 +++- base/src/ImageViewerModule.cpp | 6 +- base/src/Module.cpp | 13 +- base/src/Mp4ReaderSource.cpp | 4 + base/src/Mp4WriterSink.cpp | 1 + base/src/NvEglRenderer.cpp | 208 +++++++++++++++++++++++----- base/src/ThumbnailListGenerator.cpp | 58 +++----- 8 files changed, 250 insertions(+), 83 deletions(-) diff --git a/base/include/ApraNvEglRenderer.h b/base/include/ApraNvEglRenderer.h index 3526c4dd1..5c9a644d6 100644 --- a/base/include/ApraNvEglRenderer.h +++ b/base/include/ApraNvEglRenderer.h @@ -138,7 +138,8 @@ class NvEglRenderer * @return 0 for success, -1 otherwise. */ static int getDisplayResolution(uint32_t &width, uint32_t &height); - + bool renderAndDrawLoop(); + bool windowDrag(); /** * Sets the overlay string. * @@ -149,13 +150,22 @@ class NvEglRenderer * @return 0 for success, -1 otherwise. */ int setOverlayText(char *str, uint32_t x, uint32_t y); - -private: +public: Display * x_display; /**< Connection to the X server created using XOpenDisplay(). */ Window x_window; /**< Holds the window to be used for rendering created using XCreateWindow(). 
*/ + uint32_t mWidth,mHeight; + + int drag_start_x = 0; + int drag_start_y = 0; + bool is_dragging = false; + uint32_t _x_offset = 0; + uint32_t _y_offset = 0; + XEvent event; + bool drawBorder = false; + EGLDisplay egl_display; /**< Holds the EGL Display connection. */ EGLContext egl_context; /**< Holds the EGL rendering context. */ EGLSurface egl_surface; /**< Holds the EGL Window render surface. */ diff --git a/base/include/Command.h b/base/include/Command.h index 3fbc0406e..5014a5f6b 100755 --- a/base/include/Command.h +++ b/base/include/Command.h @@ -25,7 +25,8 @@ class Command NVRCommandExportView, MP4WriterLastTS, MMQtimestamps, - Rendertimestamp + Rendertimestamp, + RenderPlayPause }; Command() @@ -594,4 +595,28 @@ class PlayPauseCommand : public Command ar& speed; ar& direction; } +}; + +class RenderPlayPause : public Command +{ +public: + RenderPlayPause() : Command(Command::CommandType::RenderPlayPause) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(pauseRenderer); + } + + bool pauseRenderer; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& pauseRenderer; + } }; \ No newline at end of file diff --git a/base/src/ImageViewerModule.cpp b/base/src/ImageViewerModule.cpp index bfa361d0e..cc2603d18 100755 --- a/base/src/ImageViewerModule.cpp +++ b/base/src/ImageViewerModule.cpp @@ -284,11 +284,11 @@ bool ImageViewerModule::handleCommand(Command::CommandType type, frame_sp &frame return true; } - else if (type == Command::CommandType::NVRCommandView) + else if (type == Command::CommandType::RenderPlayPause) { - NVRCommandView cmd; + RenderPlayPause cmd; getCommand(cmd, frame); - if(cmd.doView) + if(cmd.pauseRenderer) { showRender = true; return true; diff --git a/base/src/Module.cpp b/base/src/Module.cpp index c8cab7e51..9aab9b0a2 100644 --- a/base/src/Module.cpp +++ 
b/base/src/Module.cpp @@ -1216,9 +1216,16 @@ bool Module::step() return true; } - mProfiler->startProcessingLap(); - ret = stepNonSource(frames); - mProfiler->endLap(mQue->size()); + if(mPlay) + { + mProfiler->startProcessingLap(); + ret = stepNonSource(frames); + mProfiler->endLap(mQue->size()); + } + else + { + ret = true; + } } return ret; diff --git a/base/src/Mp4ReaderSource.cpp b/base/src/Mp4ReaderSource.cpp index 5052999e9..90019a33e 100644 --- a/base/src/Mp4ReaderSource.cpp +++ b/base/src/Mp4ReaderSource.cpp @@ -83,6 +83,10 @@ class Mp4ReaderDetailAbs mState.mVideoPath = videoPath; mProps = props; mState.end = false; + if(boost::filesystem::path(videoPath).extension() == ".mp4") + { + isVideoFileFound = true; + } } void setProps(Mp4ReaderSourceProps& props) diff --git a/base/src/Mp4WriterSink.cpp b/base/src/Mp4WriterSink.cpp index 277032ea9..e37fec0f4 100644 --- a/base/src/Mp4WriterSink.cpp +++ b/base/src/Mp4WriterSink.cpp @@ -205,6 +205,7 @@ class DetailAbs if (mux) { mp4_mux_close(mux); + mux = nullptr; } return true; } diff --git a/base/src/NvEglRenderer.cpp b/base/src/NvEglRenderer.cpp index 07ff7a27c..2c2e7a372 100644 --- a/base/src/NvEglRenderer.cpp +++ b/base/src/NvEglRenderer.cpp @@ -60,6 +60,16 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, XSetWindowAttributes window_attributes; x_window = 0; x_display = NULL; + XColor color, dummy; + XGCValues gr_values; + + this->mWidth = width; + this->mHeight = height; + this->drawBorder = false; + + this->_x_offset = x_offset; + this->_y_offset = y_offset; + texture_id = 0; gc = NULL; @@ -104,14 +114,14 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, y_offset = 0; } - window_attributes.override_redirect = 0; depth = DefaultDepth(x_display, DefaultScreen(x_display)); + //window_attributes.override_redirect = 1; window_attributes.background_pixel = BlackPixel(x_display, DefaultScreen(x_display)); - window_attributes.override_redirect = 
displayOnTop; + window_attributes.override_redirect = (displayOnTop ? 1 : 0); Atom WM_HINTS; if(window_attributes.override_redirect == 0) { @@ -134,10 +144,9 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, (CWBackPixel | CWOverrideRedirect), &window_attributes); - if(window_attributes.override_redirect == 0) { - XStoreName(x_display, x_window, "LIVE WINDOW"); + XStoreName(x_display, x_window, "ApraEglRenderer"); XFlush(x_display); XSizeHints hints; @@ -148,41 +157,166 @@ NvEglRenderer::NvEglRenderer(const char *name, uint32_t width, uint32_t height, hints.flags = PPosition | PSize; XSetWMNormalHints(x_display, x_window, &hints); - // Set Motif hints for window manager - Atom _MOTIF_WM_HINTS = XInternAtom(x_display, "_MOTIF_WM_HINTS", True); - if (_MOTIF_WM_HINTS != None) - { - struct - { - unsigned long flags; - unsigned long functions; - unsigned long decorations; - long inputMode; - unsigned long status; - } WM_HINTS = { (1L << 1), 0, 1, 0, 0 }; // Setting decorations to 1 adds title bar - XChangeProperty(x_display, x_window, _MOTIF_WM_HINTS, _MOTIF_WM_HINTS, 32, - PropModeReplace, (unsigned char *)&WM_HINTS, 5); - } - - Atom WM_DELETE_WINDOW = XInternAtom(x_display, "WM_DELETE_WINDOW", False); - XSetWMProtocols(x_display, x_window, &WM_DELETE_WINDOW, 1); - } + WM_HINTS = XInternAtom(x_display, "_MOTIF_WM_HINTS", True); + XChangeProperty(x_display, x_window, WM_HINTS, WM_HINTS, 32, + PropModeReplace, (unsigned char *)&WM_HINTS, 5); + } + + XSelectInput(x_display, (int32_t) x_window, ButtonPressMask | + NoEventMask | + KeyPressMask | + KeyReleaseMask | + ButtonReleaseMask | + EnterWindowMask | + LeaveWindowMask | + PointerMotionMask | + PointerMotionHintMask | + Button1MotionMask | + Button2MotionMask | + Button3MotionMask | + Button4MotionMask | + Button5MotionMask | + ButtonMotionMask | + KeymapStateMask | + ExposureMask | + VisibilityChangeMask | + StructureNotifyMask | + ResizeRedirectMask | + SubstructureNotifyMask | + 
SubstructureRedirectMask | + FocusChangeMask | + PropertyChangeMask | + ColormapChangeMask | + OwnerGrabButtonMask); + + fontinfo = XLoadQueryFont(x_display, "9x15bold"); + + // XAllocNamedColor(x_display, DefaultColormap(x_display, screen_num), "green", &color, &dummy); + // XSetWindowBorder(x_display, x_window, color.pixel); + + // gr_values.font = fontinfo->fid; + // gr_values.foreground = color.pixel; + // gr_values.line_width = 5; + + // gc = XCreateGC(x_display, x_window, GCFont | GCForeground | GCLineWidth, &gr_values); + + // XFlush(x_display); + // XMapWindow(x_display, (int32_t)x_window); + // XFlush(x_display); + + XMapWindow(x_display, (int32_t)x_window); + gc = XCreateGC(x_display, x_window, 0, NULL); - XSelectInput(x_display, (int32_t) x_window, ExposureMask); - XMapWindow(x_display, (int32_t) x_window); - gc = XCreateGC(x_display, x_window, 0, NULL); + XSetForeground(x_display, gc, + WhitePixel(x_display, DefaultScreen(x_display))); + fontinfo = XLoadQueryFont(x_display, "9x15bold"); + pthread_mutex_lock(&render_lock); + pthread_create(&render_thread, NULL, renderThread, this); + pthread_setname_np(render_thread, "EglRenderer"); + pthread_cond_wait(&render_cond, &render_lock); + pthread_mutex_unlock(&render_lock); - XSetForeground(x_display, gc, - WhitePixel(x_display, DefaultScreen(x_display)) ); - fontinfo = XLoadQueryFont(x_display, "9x15bold"); + return; +} - pthread_mutex_lock(&render_lock); - pthread_create(&render_thread, NULL, renderThread, this); - pthread_setname_np(render_thread, "EglRenderer"); - pthread_cond_wait(&render_cond, &render_lock); - pthread_mutex_unlock(&render_lock); +bool NvEglRenderer::renderAndDrawLoop() +{ + if (drawBorder) + { + XDrawRectangle(x_display, x_window, gc, 0, 0, (mWidth)-1, (mHeight)-1); + XFlush(x_display); + } + return true; +} - return; +bool NvEglRenderer::windowDrag() +{ + if (XCheckMaskEvent(x_display, + ButtonPressMask | + NoEventMask | + KeyPressMask | + KeyReleaseMask | + ButtonReleaseMask | + 
EnterWindowMask | + LeaveWindowMask | + PointerMotionMask | + PointerMotionHintMask | + Button1MotionMask | + Button2MotionMask | + Button3MotionMask | + Button4MotionMask | + Button5MotionMask | + ButtonMotionMask | + KeymapStateMask | + ExposureMask | + VisibilityChangeMask | + StructureNotifyMask | + ResizeRedirectMask | + SubstructureNotifyMask | + SubstructureRedirectMask | + FocusChangeMask | + PropertyChangeMask | + ColormapChangeMask | + OwnerGrabButtonMask, + &event)) + { + if (event.type == ButtonPress) + { + if (event.xbutton.button == Button1) + { + drag_start_x = event.xbutton.x_root - _x_offset; + drag_start_y = event.xbutton.y_root - _y_offset; + is_dragging = true; + } + } + else if (event.type == MotionNotify) + { + if (is_dragging) + { + int screen = DefaultScreen(x_display); + _x_offset = event.xbutton.x_root - drag_start_x; + _y_offset = event.xbutton.y_root - drag_start_y; + int centerX = _x_offset + mWidth / 2; + int centerY = _y_offset + mHeight / 2; + int screenWidth = XDisplayWidth(x_display, screen); + int screenHeight = XDisplayHeight(x_display, screen); + + // Determine the closest corner + int closestX, closestY; + + if (centerX <= screenWidth / 2) + { + closestX = 0; + } + else + { + closestX = screenWidth - mWidth; + } + + if (centerY <= screenHeight / 2) + { + closestY = 0; + } + else + { + closestY = screenHeight - mHeight; + } + + // Move the window to the closest corner + // XMoveWindow(x_display, x_window, _x_offset, _y_offset); + XMoveWindow(x_display, x_window, closestX, closestY); + XFlush(x_display); + } + } + else if (event.type == ButtonRelease) + { + if (event.xbutton.button == Button1) + { + is_dragging = false; + } + } + } + return true; } int @@ -284,8 +418,9 @@ NvEglRenderer::renderThread(void *arg) break; } + renderer->windowDrag(); renderer->renderInternal(); - + renderer->renderAndDrawLoop(); pthread_mutex_lock(&renderer->render_lock); pthread_cond_broadcast(&renderer->render_cond); } @@ -330,6 +465,7 @@ 
NvEglRenderer::renderThread(void *arg) pthread_mutex_lock(&renderer->render_lock); pthread_cond_broadcast(&renderer->render_cond); pthread_mutex_unlock(&renderer->render_lock); + return NULL; error: diff --git a/base/src/ThumbnailListGenerator.cpp b/base/src/ThumbnailListGenerator.cpp index fda936ba4..cc74c9c8d 100644 --- a/base/src/ThumbnailListGenerator.cpp +++ b/base/src/ThumbnailListGenerator.cpp @@ -108,49 +108,34 @@ bool ThumbnailListGenerator::process(frame_container &frames) return true; } - // ImagePlanes mImagePlanes; - // DMAFrameUtils::GetImagePlanes mGetImagePlanes; - // int mNumPlanes = 0; + ImagePlanes mImagePlanes; + DMAFrameUtils::GetImagePlanes mGetImagePlanes; + int mNumPlanes = 0; + size_t mSize; - framemetadata_sp frameMeta = frame->getMetadata(); - - // mGetImagePlanes = DMAFrameUtils::getImagePlanesFunction(frameMeta, mImagePlanes); - // mNumPlanes = static_cast(mImagePlanes.size()); - - // mGetImagePlanes(frame, mImagePlanes); - - // uint8_t* dstPtr = (uint8_t*) malloc(frameMeta->getDataSize()); - // for (auto i = 0; i < mNumPlanes; i++) - // { - // mImagePlanes[i]->mCopyToData(mImagePlanes[i].get(), dstPtr); - // dstPtr += mImagePlanes[i]->imageSize; - // } - - // FrameMetadata::FrameType fType = frameMeta->getFrameType(); + framemetadata_sp frameMeta = frame->getMetadata(); + auto rawPlanarMetadata = FrameMetadataFactory::downcast(frameMeta); + auto height = rawPlanarMetadata->getHeight(0); + auto width = rawPlanarMetadata->getWidth(0); - // uint8_t* dstPtr = (uint8_t*) malloc(frame->size()); - // auto frameSize = frame->size(); + mGetImagePlanes = DMAFrameUtils::getImagePlanesFunction(frameMeta, mImagePlanes); + mNumPlanes = static_cast(mImagePlanes.size()); + mSize = width * height * 1.5; + mGetImagePlanes(frame, mImagePlanes); - // dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtrY(); - // dstPtr += frameSize / 2; - // dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtrU(); - // dstPtr += frameSize / 4; - // dstPtr = 
(uint8_t*)(static_cast(frame->data()))->getHostPtrV(); - // dstPtr += frameSize / 4; - // dstPtr -= frameSize; + uint8_t data = 0; + cv::Mat yuvImage(height * 1.5, width, CV_8UC1, data); - auto dstPtr = (uint8_t*)(static_cast(frame->data()))->getHostPtr(); + uint8_t* dstPtr = yuvImage.data; + for (auto i = 0; i < mNumPlanes; i++) + { + const auto& plane = mImagePlanes[i]; + std::memcpy(dstPtr, plane->data, plane->imageSize); + dstPtr += plane->imageSize; + } - auto rawPlanarMetadata = FrameMetadataFactory::downcast(frameMeta); - auto height = rawPlanarMetadata->getHeight(0); - auto width = rawPlanarMetadata->getWidth(0); - LOG_ERROR << "width = "<< width; - LOG_ERROR << "height = "<< height; auto st = rawPlanarMetadata->getStep(0); - uint8_t data = 0; cv::Mat bgrImage; - auto yuvImage = cv::Mat(height * 1.5, width, CV_8UC1, static_cast(&data)); - yuvImage.data = static_cast(dstPtr); cv::cvtColor(yuvImage, bgrImage, cv::COLOR_YUV2BGRA_NV12); cv::Mat bgrImageResized; @@ -208,7 +193,6 @@ bool ThumbnailListGenerator::process(frame_container &frames) // Clean up the JPEG compression object and close the output file jpeg_destroy_compress(&cinfo); fclose(outfile); - LOG_ERROR << "wrote thumbail"; #endif return true; } From 90e549dccae447174645e3f2bc13512243b6bf09 Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Fri, 5 Jan 2024 16:16:14 +0530 Subject: [PATCH 13/19] temp commit --- base/CMakeLists.txt | 40 +++ base/include/Background.h | 3 + base/include/FrameMetadata.h | 2 - base/include/GLUtils.h | 12 + base/include/GtkGlRenderer.h | 39 +++ base/include/Matrix.h | 4 + base/include/Model.h | 7 + base/include/Program.h | 22 ++ base/include/View.h | 5 + base/include/stdafx.h | 10 +- base/src/Background.cpp | 144 +++++++++ base/src/GtkGlRenderer.cpp | 379 ++++++++++++++++++++++ base/src/Matrix.cpp | 99 ++++++ base/src/Model.cpp | 508 ++++++++++++++++++++++++++++++ base/src/Program.cpp | 315 ++++++++++++++++++ base/src/RTSPClientSrc.cpp | 29 +- base/src/View.cpp | 72 +++++ 
base/test/gtkglrenderer_tests.cpp | 316 +++++++++++++++++++ 18 files changed, 1990 insertions(+), 16 deletions(-) create mode 100644 base/include/Background.h create mode 100644 base/include/GLUtils.h create mode 100644 base/include/GtkGlRenderer.h create mode 100644 base/include/Matrix.h create mode 100644 base/include/Model.h create mode 100644 base/include/Program.h create mode 100644 base/include/View.h create mode 100644 base/src/Background.cpp create mode 100644 base/src/GtkGlRenderer.cpp create mode 100644 base/src/Matrix.cpp create mode 100644 base/src/Model.cpp create mode 100644 base/src/Program.cpp create mode 100644 base/src/View.cpp create mode 100644 base/test/gtkglrenderer_tests.cpp diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt index c710af12b..3332d8b3d 100755 --- a/base/CMakeLists.txt +++ b/base/CMakeLists.txt @@ -51,6 +51,9 @@ find_package(ZXing CONFIG REQUIRED) find_package(bigint CONFIG REQUIRED) find_package(SFML COMPONENTS system window audio graphics CONFIG REQUIRED) +pkg_check_modules(GTK3 REQUIRED gtk+-3.0) +pkg_check_modules(GDK3 REQUIRED gdk-3.0) + IF(ENABLE_CUDA) if((NOT DEFINED CMAKE_CUDA_ARCHITECTURES) OR (CMAKE_CUDA_ARCHITECTURES STREQUAL "")) set(CMAKE_CUDA_ARCHITECTURES 52 60 70 75) @@ -90,6 +93,20 @@ IF(ENABLE_CUDA) ${NVARGUS_SOCKETCLINET_LIB} ) include_directories(AFTER SYSTEM /usr/local/cuda/include) + include_directories(AFTER SYSTEM /usr/include/gtk-3.0/) + include_directories(AFTER SYSTEM /usr/include/glib-2.0/) + include_directories(AFTER SYSTEM /usr/local/cuda/include) + include_directories(AFTER SYSTEM /usr/include/gtk-3.0/) + include_directories(AFTER SYSTEM /usr/include/glib-2.0/) + include_directories(AFTER SYSTEM /usr/lib/aarch64-linux-gnu/glib-2.0/include/) + include_directories(AFTER SYSTEM /usr/include/pango-1.0/) + include_directories(AFTER SYSTEM /usr/include/harfbuzz/) + include_directories(AFTER SYSTEM /usr/include/cairo/) + include_directories(AFTER SYSTEM /usr/include/atk-1.0/) + 
include_directories(AFTER SYSTEM /usr/include/gdk-pixbuf-2.0/) + include_directories(AFTER SYSTEM /usr/local/cuda/samples/common/inc/) + include_directories(AFTER SYSTEM /usr/include/) + include_directories(AFTER SYSTEM /mnt/disks/ssd/NVR/apranvr/thirdparty/ApraGTKUtils/includes/) ELSEIF(ENABLE_LINUX) find_library(LIBNVCUVID libnvcuvid.so PATHS ../thirdparty/Video_Codec_SDK_10.0.26/Lib/linux/stubs/x86_64 NO_DEFAULT_PATH) find_library(LIBNVENCODE libnvidia-encode.so PATHS ../thirdparty/Video_Codec_SDK_10.0.26/Lib/linux/stubs/x86_64 NO_DEFAULT_PATH) @@ -374,6 +391,12 @@ IF(ENABLE_ARM64) src/DMAFDToHostCopy.cpp src/H264DecoderV4L2Helper.cpp src/H264DecoderV4L2Helper.h + src/Background.cpp + src/GtkGlRenderer.cpp + src/Matrix.cpp + src/Model.cpp + src/Program.cpp + src/View.cpp ) ELSE() SET(CUDA_IP_FILES ${CUDA_IP_FILES} # following modules and related files do not work on ARM64 @@ -424,6 +447,13 @@ IF(ENABLE_ARM64) include/ApraEGLDisplay.h include/DMAFrameUtils.h include/DMAFDToHostCopy.h + include/Background.h + include/GLUtils.h + include/GtkGlRenderer.h + include/Matrix.h + include/Model.h + include/Program.h + include/View.h ) ELSE() SET(CUDA_IP_FILES_H ${CUDA_IP_FILES_H} # following modules and related files do not work on ARM64 @@ -482,6 +512,7 @@ IF (ENABLE_ARM64) test/apraegldisplay_tests.cpp test/frame_factory_test_dma.cpp test/h264decoder_tests.cpp + test/gtkglrenderer_tests.cpp ) ENDIF(ENABLE_ARM64) @@ -595,6 +626,15 @@ find_library(LIBMP4_LIB NAMES mp4lib.lib libmp4lib.a REQUIRED) target_link_libraries(aprapipesut aprapipes + /usr/lib/aarch64-linux-gnu/libgtk-3-0 + /usr/lib/aarch64-linux-gnu/libGLEW.so + /usr/lib/aarch64-linux-gnu/libgdk-3.so + /usr/lib/aarch64-linux-gnu/libGL.so.1 + /usr/lib/aarch64-linux-gnu/libgdk_pixbuf-2.0.so.0 + /usr/lib/aarch64-linux-gnu/libgio-2.0.so.0 + /usr/lib/aarch64-linux-gnu/libgobject-2.0.so.0 + ${GTK3_LIBRARIES} + ${GDK3_LIBRARIES} ${JPEG_LIBRARIES} ${LIBMP4_LIB} ${OPENH264_LIB} diff --git a/base/include/Background.h 
b/base/include/Background.h new file mode 100644 index 000000000..af1473046 --- /dev/null +++ b/base/include/Background.h @@ -0,0 +1,3 @@ +void background_draw (void); +void background_init (void); +void background_set_window (int width, int height); diff --git a/base/include/FrameMetadata.h b/base/include/FrameMetadata.h index ca8e5f646..278dbb432 100755 --- a/base/include/FrameMetadata.h +++ b/base/include/FrameMetadata.h @@ -56,11 +56,9 @@ class FrameMetadata { enum MemType { HOST = 1, -#ifdef APRA_CUDA_ENABLED HOST_PINNED = 2, CUDA_DEVICE = 3, DMABUF = 4 -#endif }; FrameMetadata(FrameType _frameType) diff --git a/base/include/GLUtils.h b/base/include/GLUtils.h new file mode 100644 index 000000000..ed2a3ce92 --- /dev/null +++ b/base/include/GLUtils.h @@ -0,0 +1,12 @@ +// Get number of elements in an array: +#define NELEM(array) (sizeof(array) / sizeof(*(array))) + +// Loop over an array of given size: +#define FOREACH_NELEM(array, nelem, iter) \ + for (__typeof__(*(array)) *iter = (array); \ + iter < (array) + (nelem); \ + iter++) + +// Loop over an array of known size: +#define FOREACH(array, iter) \ + FOREACH_NELEM(array, NELEM(array), iter) diff --git a/base/include/GtkGlRenderer.h b/base/include/GtkGlRenderer.h new file mode 100644 index 000000000..87960b16b --- /dev/null +++ b/base/include/GtkGlRenderer.h @@ -0,0 +1,39 @@ +#pragma once + +#include "Module.h" +#include // remove this +#include +class GtkGlRendererProps : public ModuleProps +{ +public: + GtkGlRendererProps(GtkWidget* _glArea, int _windowWidth, int _windowHeight) : ModuleProps() // take gtk string + { + // gladeFileName = _gladeFileName; + glArea = _glArea; + windowWidth = _windowWidth; + windowHeight = _windowHeight; + } + GtkWidget* glArea; + int windowWidth, windowHeight; +}; + +class GtkGlRenderer : public Module +{ +public: + GtkGlRenderer(GtkGlRendererProps props); + ~GtkGlRenderer(); + + bool init(); + bool term(); + bool changeProps(GtkWidget* glArea, int windowWidth, int 
windowHeight); + // wait_for_exit + +protected: + bool process(frame_container& frames); + bool processSOS(frame_sp &frame); + bool validateInputPins(); + bool shouldTriggerSOS(); +private: + class Detail; + boost::shared_ptr mDetail; +}; diff --git a/base/include/Matrix.h b/base/include/Matrix.h new file mode 100644 index 000000000..58e0669f7 --- /dev/null +++ b/base/include/Matrix.h @@ -0,0 +1,4 @@ +void mat_frustum (float *matrix, float angle_of_view, float aspect_ratio, float z_near, float z_far); +void mat_translate (float *matrix, float dx, float dy, float dz); +void mat_rotate (float *matrix, float x, float y, float z, float angle); +void mat_multiply (float *matrix, float *a, float *b); diff --git a/base/include/Model.h b/base/include/Model.h new file mode 100644 index 000000000..55d23b67b --- /dev/null +++ b/base/include/Model.h @@ -0,0 +1,7 @@ +void model_init (void); +void model_draw (void); +void draw_frames(void); +void drawCameraFrame(void* frameData, int width, int height); +const float *model_matrix(void); +void model_pan_start (int x, int y); +void model_pan_move (int x, int y); diff --git a/base/include/Program.h b/base/include/Program.h new file mode 100644 index 000000000..8d40bd557 --- /dev/null +++ b/base/include/Program.h @@ -0,0 +1,22 @@ +#include + +void initProgram (void); +void programs_init (void); +void program_cube_use (void); +void program_bkgd_use (void); + +enum LocBkgd { + LOC_BKGD_VERTEX, + LOC_BKGD_TEXTURE, +}; + +enum LocCube { + LOC_CUBE_VIEW, + LOC_CUBE_MODEL, + LOC_CUBE_VERTEX, + LOC_CUBE_VCOLOR, + LOC_CUBE_NORMAL, +}; + +GLint program_bkgd_loc (const enum LocBkgd); +GLint program_cube_loc (const enum LocCube); diff --git a/base/include/View.h b/base/include/View.h new file mode 100644 index 000000000..186a1aead --- /dev/null +++ b/base/include/View.h @@ -0,0 +1,5 @@ +void initZVal(void); +const float *view_matrix (void); +void view_set_window (int width, int height); +void view_z_decrease (void); +void view_z_increase 
(void); diff --git a/base/include/stdafx.h b/base/include/stdafx.h index 1c8927529..cf0b4a07a 100755 --- a/base/include/stdafx.h +++ b/base/include/stdafx.h @@ -3,13 +3,13 @@ // are changed infrequently // -#pragma once +// #pragma once -#ifndef LINUX -#include "targetver.h" +// #ifndef LINUX +// #include "targetver.h" -#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers -#endif +// #define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers +// #endif // TODO: reference additional headers your program requires here diff --git a/base/src/Background.cpp b/base/src/Background.cpp new file mode 100644 index 000000000..0f01be471 --- /dev/null +++ b/base/src/Background.cpp @@ -0,0 +1,144 @@ +#include +//yash change +// #include +#ifndef GL_H +#define GL_H +#include +#include +#endif +// #include +#include "Program.h" + +static GLuint texture; +static GLuint vao, vbo; + +// Each vertex has space and texture coordinates: +struct vertex { + float x; + float y; + float u; + float v; +} __attribute__((packed)); + +void +background_set_window (int width, int height) +{ + // The background quad is made of four vertices: + // + // 3--2 + // | | + // 0--1 + // + struct vertex vertex[4] = { + { -1, -1, 0, 0 }, // Bottom left + { 1, -1, 1, 0 }, // Bottom right + { 1, 1, 1, 1 }, // Top right + { -1, 1, 0, 1 }, // Top left + }; + + GLint loc_vertex = program_bkgd_loc(LOC_BKGD_VERTEX); + GLint loc_texture = program_bkgd_loc(LOC_BKGD_TEXTURE); + + glBindVertexArray(vao); + + glEnableVertexAttribArray(loc_vertex); + glEnableVertexAttribArray(loc_texture); + + glBindBuffer(GL_ARRAY_BUFFER, vbo); + glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW); + + glVertexAttribPointer(loc_vertex, 2, GL_FLOAT, GL_FALSE, + sizeof(struct vertex), + (void *) offsetof(struct vertex, x)); + + glVertexAttribPointer(loc_texture, 2, GL_FLOAT, GL_FALSE, + sizeof(struct vertex), + (void *) offsetof(struct vertex, u)); + + glBindVertexArray(0); +} 
+ +void +background_draw (void) +{ + // Array of indices. We define two counterclockwise triangles: + // 0-2-3 and 2-0-1 + //yash change + // static GLubyte index[6] = { + // 0, 1, 1, + // 2, 0, 1, + // 1, 3, 0 + // }; + static GLubyte triangle1[] = { + 0, 1, 2, + 0, 2, 3 + }; + + static GLubyte triangle2[] = { + 4, 5, 6, + 4, 6, 7 + }; + + + program_bkgd_use(); + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, texture); + glBindVertexArray(vao); + //yash change + // glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, index); + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, triangle1); + + // Draw the second triangle + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, triangle2); + //yash change end + glBindVertexArray(0); +} + +void +background_init (void) +{ + // Inline data declaration: + // extern char _binary_textures_background_png_start[]; + // extern char _binary_textures_background_png_end[]; + + // char *start = _binary_textures_background_png_start; + // size_t len = _binary_textures_background_png_end + // - _binary_textures_background_png_start; + + char *start ="start"; + size_t len = strlen(start); + + GInputStream *stream; + GdkPixbuf *pixbuf; + + // Create an input stream from inline data: + stream = g_memory_input_stream_new_from_data(start, len, NULL); + + // Generate a pixbuf from the input stream: + pixbuf = gdk_pixbuf_new_from_stream(stream, NULL, NULL); + + // Destroy the stream: + g_object_unref(stream); + + // Generate an OpenGL texture from pixbuf; + // hack a bit by not accounting for pixbuf rowstride: + glGenTextures(1, &texture); + glBindTexture(GL_TEXTURE_2D, texture); + + // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, + // gdk_pixbuf_get_width(pixbuf), + // gdk_pixbuf_get_height(pixbuf), 0, GL_RGBA, GL_UNSIGNED_BYTE, + // gdk_pixbuf_get_pixels(pixbuf)); + + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); + + glTexParameteri(GL_TEXTURE_2D, 
GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + + // Generate empty buffer: + glGenBuffers(1, &vbo); + + // Generate empty vertex array object: + glGenVertexArrays(1, &vao); +} diff --git a/base/src/GtkGlRenderer.cpp b/base/src/GtkGlRenderer.cpp new file mode 100644 index 000000000..33042191d --- /dev/null +++ b/base/src/GtkGlRenderer.cpp @@ -0,0 +1,379 @@ +#include +#include +#include +#include + +#include "Logger.h" +#include "GtkGlRenderer.h" +#include "DMAFDWrapper.h" +#include "Background.h" +#include "Matrix.h" +#include "Model.h" +#include "Program.h" +#include "GLUtils.h" +#include "View.h" + +struct signal +{ + const gchar *signal; + GCallback handler; + GdkEventMask mask; +}; + +class GtkGlRenderer::Detail +{ + +public: + Detail(GtkGlRendererProps &_props) : mProps(_props) + { + isMetadataSet = false; + } + + ~Detail() + { + } + + static void + on_resize(GtkGLArea *area, gint width, gint height, gpointer data) + { + printf("In resize width = %d, height = %d\n", width, height); + view_set_window(width, height); + background_set_window(width, height); + } + void setProps(GtkGlRendererProps &props) + { + mProps = props; + } + static gboolean + on_render(GtkGLArea *glarea, GdkGLContext *context, gpointer data) + { + // Clear canvas: + GtkGlRenderer::Detail *detailInstance = (GtkGlRenderer::Detail *)data; + LOG_DEBUG << "Coming Inside Renderer"; + if (detailInstance->isMetadataSet == false) + { + LOG_INFO << "Metadata is Not Set "; + return TRUE; + } + + if (!detailInstance->cachedFrame.get()) + { + LOG_ERROR << "Got Empty Frame"; + return TRUE; + } + detailInstance->renderFrame = detailInstance->cachedFrame; + void *frameToRender; + if (detailInstance->isDmaMem) + { + // frameToRender = static_cast(detailInstance->renderFrame->data())->getCudaPtr(); + frameToRender = static_cast(detailInstance->renderFrame->data())->getHostPtr(); + } + else + { + frameToRender = detailInstance->renderFrame->data(); + 
} + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + // Draw background: + background_draw(); + + // Draw model: + // model_draw(); + // draw_frames(); + drawCameraFrame(frameToRender, detailInstance->frameWidth, detailInstance->frameHeight); + //LOG_ERRO<<"Framestep is"<< detailInstance->step; + //drawCameraFrame(frameToRender, 1024, 1024); + + // Don't propagate signal: + return TRUE; + } + + static gboolean + on_realize(GtkGLArea *glarea, GdkGLContext *context, gpointer data) // Process SOS + { + // Make current: + gtk_gl_area_make_current(glarea); + + if (gtk_gl_area_get_error(glarea) != NULL) + { + LOG_ERROR << "Failed to initialize buffer"; + return FALSE; + } + // Print version info: + const GLubyte *renderer = glGetString(GL_RENDERER); + const GLubyte *version = glGetString(GL_VERSION); + + LOG_ERROR << "OpenGL version supported " << version; + + // Enable depth buffer: + gtk_gl_area_set_has_depth_buffer(glarea, TRUE); + + // Init programs: + programs_init(); + + // Init background: + background_init(); + + // Init model: + model_init(); + + // Get frame clock: + GdkGLContext *glcontext = gtk_gl_area_get_context(glarea); + GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); + GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); + + // Connect update signal: + g_signal_connect_swapped(frame_clock, "update", G_CALLBACK(gtk_gl_area_queue_render), glarea); + + // Start updating: + gdk_frame_clock_begin_updating(frame_clock); + return TRUE; + } + + // on_unrealize() + // { + // // model_cleanup(); + // // background_cleanup(); + // // programs_cleanup(); + + // // Get the frame clock and disconnect the update signal + // GdkGLContext *glcontext = gtk_gl_area_get_context(GTK_GL_AREA(glarea)); + // GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); + // GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); + // g_signal_handlers_disconnect_by_func(frame_clock, G_CALLBACK(gtk_gl_area_queue_render), glarea); + // 
GtkWidget *parent_container = gtk_widget_get_parent(glarea); + + // // Remove the GtkGLArea from its parent container + // gtk_container_remove(GTK_CONTAINER(parent_container), glarea); + + // // Destroy the GtkGLArea widget + // gtk_widget_destroy(glarea); + // } + + + // void on_unrealize() + // { + // GdkGLContext *glcontext = gtk_gl_area_get_context(GTK_GL_AREA(glarea)); + // GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); + // GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); + + // // Disconnect the update signal from frame_clock + // //g_signal_handlers_disconnect_by_func(frame_clock, G_CALLBACK(gtk_gl_area_queue_render), G_OBJECT(glarea)); + + // // Get the parent container + // GtkWidget *parent_container = gtk_widget_get_parent(glarea); + + // // Remove the GtkGLArea from its parent container + // gtk_container_remove(GTK_CONTAINER(parent_container), glarea); + + // // Destroy the GtkGLArea widget + // gtk_widget_destroy(glarea); + // } + + static gboolean + on_scroll(GtkWidget *widget, GdkEventScroll *event, gpointer data) + { + switch (event->direction) + { + case GDK_SCROLL_UP: + view_z_decrease(); + break; + + case GDK_SCROLL_DOWN: + view_z_increase(); + break; + + default: + break; + } + + return FALSE; + } + + void + connect_signals(GtkWidget *widget, struct signal *signals, size_t members) + { + FOREACH_NELEM(signals, members, s) + { + gtk_widget_add_events(widget, s->mask); + g_signal_connect(widget, s->signal, s->handler, this); + } + } + + void + connect_window_signals(GtkWidget *window) + { + struct signal signals[] = { + {"destroy", G_CALLBACK(gtk_main_quit), (GdkEventMask)0}, + }; + + connect_signals(window, signals, NELEM(signals)); + } + + void + connect_glarea_signals(GtkWidget *glarea) + { + // {"resize", G_CALLBACK(on_resize), (GdkEventMask)0}, + // {"scroll-event", G_CALLBACK(on_scroll), GDK_SCROLL_MASK}, + //connect_signals(glarea, signals, NELEM(signals)); + g_signal_connect(glarea, "realize", 
G_CALLBACK(on_realize), this); + g_signal_connect(glarea, "render", G_CALLBACK(on_render), this); + g_signal_connect(glarea, "resize", G_CALLBACK(on_resize), this); + } + + bool init() + { + connect_glarea_signals(glarea); + // initialize_gl(GTK_GL_AREA(glarea)); + return true; + } + + GtkWidget *glarea; + int windowWidth, windowHeight; + uint64_t frameWidth, frameHeight; + frame_sp cachedFrame, renderFrame; + void *frameToRender; + bool isDmaMem; + bool isMetadataSet; + GtkGlRendererProps mProps; +}; + +GtkGlRenderer::GtkGlRenderer(GtkGlRendererProps props) : Module(SINK, "GtkGlRenderer", props) +{ + mDetail.reset(new Detail(props)); + mDetail->glarea = props.glArea; + mDetail->windowWidth = props.windowWidth; + mDetail->windowHeight = props.windowHeight; +} + +GtkGlRenderer::~GtkGlRenderer() {} + +bool GtkGlRenderer::init() +{ + if (!Module::init()) + { + return false; + } + if (!mDetail->init()) + { + LOG_ERROR << "Failed To Initialize GtkGl Area "; + return false; + } + return true; +} + +bool GtkGlRenderer::process(frame_container &frames) + +{ + // LOG_ERROR << "GOT " + auto frame = frames.cbegin()->second; + mDetail->cachedFrame = frame; + return true; +} + +// Need to check on Mem Type Supported +// Already Checked With CPU , Need to check with +// framemetadata_sp metadata = getFirstInputMetadata(); +// FrameMetadata::MemType memType = metadata->getMemType(); +// if (memType != FrameMetadata::MemType::DMABUF) +// { +// LOG_ERROR << "<" << getId() << ">::validateInputPins input memType is expected to be DMABUF. Actual<" << memType << ">"; +// return false; +// } + +bool GtkGlRenderer::validateInputPins() +{ + if (getNumberOfInputPins() < 1) + { + LOG_ERROR << "<" << getId() << ">::validateInputPins size is expected to be 1. 
Actual<" << getNumberOfInputPins() << ">"; + return false; + } + + return true; +} + +bool GtkGlRenderer::term() +{ + bool res = Module::term(); + return res; +} + +bool GtkGlRenderer::changeProps(GtkWidget* glArea, int windowWidth, int windowHeight) +{ + LOG_ERROR << "Before changing props ============"<glarea; + //mDetail->on_unrealize(); + mDetail->glarea = glArea; + mDetail->windowWidth = windowWidth; + mDetail->windowHeight = windowHeight; + mDetail->init(); + LOG_ERROR << "After changing props ============"<glarea; +} + +bool GtkGlRenderer::shouldTriggerSOS() +{ + if(!mDetail->isMetadataSet) + { + LOG_ERROR << "WIll Trigger SOS"; + return true; + } + return false; +} + +bool GtkGlRenderer::processSOS(frame_sp &frame) +{ + LOG_INFO<<"I AM IN PROCESS-SOS !!!"; + auto inputMetadata = frame->getMetadata(); + auto frameType = inputMetadata->getFrameType(); + LOG_INFO<<"GOT METADATA "<getFrameType(); + int width = 0; + int height = 0; + + switch (frameType) + { + case FrameMetadata::FrameType::RAW_IMAGE: + { + auto metadata = FrameMetadataFactory::downcast(inputMetadata); + if (metadata->getImageType() != ImageMetadata::RGBA ) + { + throw AIPException(AIP_FATAL, "Unsupported ImageType, Currently Only RGB , BGR , BGRA and RGBA is supported<" + std::to_string(frameType) + ">"); + } + mDetail->frameWidth = metadata->getWidth(); + mDetail->frameHeight = metadata->getHeight(); + mDetail->isDmaMem = metadata->getMemType() == FrameMetadata::MemType::DMABUF; + + LOG_ERROR << "Width is " << metadata->getWidth() << "Height is " << metadata->getHeight(); + //LOG_ERROR << "Width STEP is " << metadata-> + FrameMetadata::MemType memType = metadata->getMemType(); + { if (memType != FrameMetadata::MemType::DMABUF) + + LOG_ERROR << "Memory Type Is Not DMA but it's a interleaved Image"; + } + } + break; + case FrameMetadata::FrameType::RAW_IMAGE_PLANAR: + { + auto metadata = FrameMetadataFactory::downcast(inputMetadata); + if (metadata->getImageType() != ImageMetadata::RGBA ) + { + 
throw AIPException(AIP_FATAL, "Unsupported ImageType, Currently Only RGB, BGR, BGRA and RGBA is supported<" + std::to_string(frameType) + ">"); + } + mDetail->frameWidth = metadata->getWidth(0); + mDetail->frameHeight = metadata->getHeight(0); + mDetail->isDmaMem = metadata->getMemType() == FrameMetadata::MemType::DMABUF; + LOG_ERROR << "Width is " << metadata->getWidth(0) << "Height is " << metadata->getHeight(0); + FrameMetadata::MemType memType = metadata->getMemType(); + if (memType != FrameMetadata::MemType::DMABUF) + { + LOG_ERROR << "Memory Type Is Not DMA but it's a planar Image"; + } + } + break; + default: + throw AIPException(AIP_FATAL, "Unsupported FrameType<" + std::to_string(frameType) + ">"); + } + mDetail->isMetadataSet = true; + LOG_ERROR << "Done Setting Metadata=========================>"; + // mDetail->init(renderHeight, renderWidth); + return true; +} + + diff --git a/base/src/Matrix.cpp b/base/src/Matrix.cpp new file mode 100644 index 000000000..65a4b8613 --- /dev/null +++ b/base/src/Matrix.cpp @@ -0,0 +1,99 @@ +#include + +void +mat_frustum (float *matrix, float angle_of_view, float aspect_ratio, float z_near, float z_far) +{ + matrix[0] = 1.0f / tanf(angle_of_view); + matrix[1] = 0.0f; + matrix[2] = 0.0f; + matrix[3] = 0.0f; + matrix[4] = 0.0f; + matrix[5] = aspect_ratio / tanf(angle_of_view); + matrix[6] = 0.0f; + matrix[7] = 0.0f; + matrix[8] = 0.0f; + matrix[9] = 0.0f; + matrix[10] = (z_far + z_near) / (z_far - z_near); + matrix[11] = 1.0f; + matrix[12] = 0.0f; + matrix[13] = 0.0f; + matrix[14] = -2.0f * z_far * z_near / (z_far - z_near); + matrix[15] = 0.0f; +} + +void +mat_translate (float *matrix, float dx, float dy, float dz) +{ + matrix[0] = 1; + matrix[1] = 0; + matrix[2] = 0; + matrix[3] = 0; + matrix[4] = 0; + matrix[5] = 1; + matrix[6] = 0; + matrix[7] = 0; + matrix[8] = 0; + matrix[9] = 0; + matrix[10] = 1; + matrix[11] = 0; + matrix[12] = dx; + matrix[13] = dy; + matrix[14] = dz; + matrix[15] = 1; +} + +static void +normalize 
(float *x, float *y, float *z) +{ + float d = sqrtf((*x) * (*x) + (*y) * (*y) + (*z) * (*z)); + *x /= d; + *y /= d; + *z /= d; +} + +void +mat_rotate (float *matrix, float x, float y, float z, float angle) +{ + normalize(&x, &y, &z); + + float s = sinf(angle); + float c = cosf(angle); + float m = 1 - c; + + matrix[0] = m * x * x + c; + matrix[1] = m * x * y - z * s; + matrix[2] = m * z * x + y * s; + matrix[3] = 0; + matrix[4] = m * x * y + z * s; + matrix[5] = m * y * y + c; + matrix[6] = m * y * z - x * s; + matrix[7] = 0; + matrix[8] = m * z * x - y * s; + matrix[9] = m * y * z + x * s; + matrix[10] = m * z * z + c; + matrix[11] = 0; + matrix[12] = 0; + matrix[13] = 0; + matrix[14] = 0; + matrix[15] = 1; +} + +void +mat_multiply (float *matrix, float *a, float *b) +{ + float result[16]; + for (int c = 0; c < 4; c++) { + for (int r = 0; r < 4; r++) { + int index = c * 4 + r; + float total = 0; + for (int i = 0; i < 4; i++) { + int p = i * 4 + r; + int q = c * 4 + i; + total += a[p] * b[q]; + } + result[index] = total; + } + } + for (int i = 0; i < 16; i++) + matrix[i] = result[i]; +} diff --git a/base/src/Model.cpp b/base/src/Model.cpp new file mode 100644 index 000000000..68eba21b8 --- /dev/null +++ b/base/src/Model.cpp @@ -0,0 +1,508 @@ + +#include +#include +#include +#include +//yash cahnge +// #include +#ifndef GL_H +#define GL_H +#include +#include +#endif +// #include +#include "Matrix.h" +#include "Program.h" +#include "GLUtils.h" + +struct point { + float x; + float y; + float z; +} __attribute__((packed)); + +struct color { + float r; + float g; + float b; +} __attribute__((packed)); + +// Each vertex has position, normal and color: +struct vertex { + struct point pos; + struct point normal; + struct color color; +} __attribute__((packed)); + +// Each triangle has three vertices: +struct triangle { + struct vertex vert[3]; +} __attribute__((packed)); + +// Each corner point has a position and a color: +struct corner { + struct point pos; + struct color 
color; +} __attribute__((packed)); + +// Each face has a single normal, four corner points, +// and two triangles: +struct face { + struct corner corner[4]; + struct point normal; + struct triangle tri[2]; +} __attribute__((packed)); + +// Each cube has six faces: +struct cube { + struct face face[6]; +} __attribute__((packed)); + +static GLuint vao, vbo; +static float matrix[16] = { 0 }; + +// Mouse movement: +static struct { + int x; + int y; +} pan; + +// Cube rotation axis: +static struct point rot = { + .x = 0.0f, + .y = 1.0f, + .z = 0.0f, +}; + +// Return the cross product of two vectors: +static void +cross (struct point *result, const struct point *a, const struct point *b) +{ + result->x = a->y * b->z - a->z * b->y; + result->y = a->z * b->x - a->x * b->z; + result->z = a->x * b->y - a->y * b->x; +} + +// Initialize the model: +void +model_init (void) +{ + // Define our cube: + // yash changes + // struct cube cube = + // { .face[0].corner = + // { { 0, 1, 0 } + // , { 1, 0, 0 } + // , { 0, 0, 0 } + // , { 1, 1, 0 } + // } + // , .face[1].corner = + // { { 0, 0, 0 } + // , { 1, 0, 1 } + // , { 0, 0, 1 } + // , { 1, 0, 0 } + // } + // , .face[2].corner = + // { { 1, 0, 0 } + // , { 1, 1, 1 } + // , { 1, 0, 1 } + // , { 1, 1, 0 } + // } + // , .face[3].corner = + // { { 1, 1, 0 } + // , { 0, 1, 1 } + // , { 1, 1, 1 } + // , { 0, 1, 0 } + // } + // , .face[4].corner = + // { { 0, 1, 0 } + // , { 0, 0, 1 } + // , { 0, 1, 1 } + // , { 0, 0, 0 } + // } + // , .face[5].corner = + // { { 0, 1, 1 } + // , { 1, 0, 1 } + // , { 1, 1, 1 } + // , { 0, 0, 1 } + // } + // } ; + + +struct cube cube; + cube.face[0].corner[0]={{ 0, 0, 0 },{0,0,0}}; // Bottom Face + cube.face[0].corner[1]={{ 0, 0, 0 },{0,0,0}}; + cube.face[0].corner[2]={{ 0, 0, 0 },{0,0,0}}; + cube.face[0].corner[3]={{ 0, 0, 0 },{0,0,0}}; + + cube.face[1].corner[0]={{ 0, 0, 0 },{0,0,0}}; // right face + cube.face[1].corner[1]={{ 0, 0, 0 },{0,0,0}}; + cube.face[1].corner[2]={{ 0, 0, 0 },{0,0,0}}; + 
cube.face[1].corner[3]={{ 0, 0, 0 },{0,0,0}}; + + cube.face[2].corner[0]={{ 0, 0, 0 },{0,0,0}}; + cube.face[2].corner[1]={{ 0, 0, 0 },{0,0,0}}; + cube.face[2].corner[2]={{ 0, 0, 0 },{0,0,0}}; + cube.face[2].corner[3]={{ 0, 0, 0 },{0,0,0}}; // top face + + cube.face[3].corner[0]={{ 0, 0, 0 },{0,0,0}}; + cube.face[3].corner[1]={{ 0, 0, 0 },{0,0,0}}; // left face + cube.face[3].corner[2]={{ 0, 0, 0 },{0,0,0}}; + cube.face[3].corner[3]={{ 0, 0, 0 },{0,0,0}}; + + cube.face[4].corner[0]={{ 0, 0, 0 },{0,0,0}}; + cube.face[4].corner[1]={{ 0, 0, 0 },{0,0,0}}; + cube.face[4].corner[2]={{ 0, 0, 0 },{0,0,0}}; // outside + cube.face[4].corner[3]={{ 0, 0, 0 },{0,0,0}}; + + cube.face[5].corner[0]={{ 0, 0, 0 },{0,0,0}}; + cube.face[5].corner[1]={{ 0, 0, 0 },{0,0,0}}; + cube.face[5].corner[2]={{ 0, 0, 0 },{0,0,0}}; //inside + cube.face[5].corner[3]={{ 0, 0, 0 },{0,0,0}}; + + // Generate colors for each corner based on its position: + FOREACH (cube.face, face) { + FOREACH (face->corner, corner) { + corner->color.r = corner->pos.x * 0.8f + 0.1f; + corner->color.g = corner->pos.y * 0.8f + 0.1f; + corner->color.b = corner->pos.z * 0.8f + 0.1f; + } + } + + // Center cube on the origin by translating corner points: + FOREACH (cube.face, face) { + FOREACH (face->corner, corner) { + corner->pos.x -= 1.0f; + corner->pos.y -= 1.0f; + corner->pos.z -= 1.0f; + } + } + // Yash Change + // FOREACH (cube.face, face) { + // FOREACH (face->corner, corner) { + // corner->pos.x -= 0.5f; + // corner->pos.y -= 0.5f; + // corner->pos.z -= 0.5f; + // } + // } + + // Face normals are cross product of two ribs: + FOREACH (cube.face, face) { + + // First rib is (corner 3 - corner 0): + struct point a = { + .x = face->corner[3].pos.x - face->corner[0].pos.x, + .y = face->corner[3].pos.y - face->corner[0].pos.y, + .z = face->corner[3].pos.z - face->corner[0].pos.z, + }; + + // Second rib is (corner 2 - corner 0): + struct point b = { + .x = face->corner[2].pos.x - face->corner[0].pos.x, + .y = 
face->corner[2].pos.y - face->corner[0].pos.y, + .z = face->corner[2].pos.z - face->corner[0].pos.z, + }; + + // Face normal is cross product of these two ribs: + cross(&face->normal, &a, &b); + } + + // Create two triangles for each face: + FOREACH (cube.face, face) { + + // Corners to compose triangles of, chosen in + // such a way that both triangles rotate CCW: + int index[2][3] = { { 0, 2, 1 }, { 1, 3, 0 } }; + + for (int t = 0; t < 2; t++) { + for (int v = 0; v < 3; v++) { + int c = index[t][v]; + struct corner *corner = &face->corner[c]; + struct vertex *vertex = &face->tri[t].vert[v]; + + vertex->pos = corner->pos; + vertex->normal = face->normal; + vertex->color = corner->color; + } + } + } + + // Copy vertices into separate array for drawing: + struct vertex vertex[6 * 2 * 3]; + struct vertex *cur = vertex; + + FOREACH (cube.face, face) { + FOREACH (face->tri, tri) { + for (int v = 0; v < 3; v++) { + *cur++ = tri->vert[v]; + } + } + } + + // Generate empty buffer: + glGenBuffers(1, &vbo); + + // Generate empty vertex array object: + glGenVertexArrays(1, &vao); + + // Set as current vertex array: + glBindVertexArray(vao); + glBindBuffer(GL_ARRAY_BUFFER, vbo); + + // Add vertex, color and normal data to buffers: + struct { + enum LocCube loc; + const void *ptr; + } + map[] = { + { .loc = LOC_CUBE_VERTEX + , .ptr = (void *) offsetof(struct vertex, pos) + } , + { .loc = LOC_CUBE_VCOLOR + , .ptr = (void *) offsetof(struct vertex, color) + } , + { .loc = LOC_CUBE_NORMAL + , .ptr = (void *) offsetof(struct vertex, normal) + } , + }; + + FOREACH (map, m) { + GLint loc = program_cube_loc(m->loc); + glEnableVertexAttribArray(loc); + glVertexAttribPointer(loc, 3, GL_FLOAT, GL_FALSE, sizeof(struct vertex), m->ptr); + } + + // Upload vertex data: + glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW); + // Generate a number for our textureID's unique handle + GLuint textureID; + glGenTextures(1, &textureID); + + // Bind to our texture handle + 
glBindTexture(GL_TEXTURE_2D, textureID); +} + +void matToTexture(unsigned char* buffer , GLenum minFilter, GLenum magFilter, GLenum wrapFilter, int width, int height) { + + // Catch silly-mistake texture interpolation method for magnification + if (magFilter == GL_LINEAR_MIPMAP_LINEAR || + magFilter == GL_LINEAR_MIPMAP_NEAREST || + magFilter == GL_NEAREST_MIPMAP_LINEAR || + magFilter == GL_NEAREST_MIPMAP_NEAREST) + { + // printf("You can't use MIPMAPs for magnification - setting filter to GL_LINEAR\n"); + std::cout << "You can't use MIPMAPs for magnification - setting filter to GL_LINEAR" << std::endl; + magFilter = GL_LINEAR; + } + + // Set texture interpolation methods for minification and magnification + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, minFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, magFilter); + + // Set texture clamping method + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, wrapFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, wrapFilter); + + // Set incoming texture format to: + // GL_BGR for CV_CAP_OPENNI_BGR_IMAGE, + // GL_LUMINANCE for CV_CAP_OPENNI_DISPARITY_MAP, + // Work out other mappings as required ( there's a list in comments in main() ) + GLenum inputColourFormat = GL_RGBA;// GL_BGR + if (3 == 1) + { + inputColourFormat = GL_LUMINANCE; + } + + // Create the texture + glTexImage2D(GL_TEXTURE_2D, // Type of texture + 0, // Pyramid level (for mip-mapping) - 0 is the top level + GL_RGBA, // CHanged from rgb to rgba Internal colour format to convert to + width, // Image width i.e. 640 for Kinect in standard mode + height, // Image height i.e. 480 for Kinect in standard mode + 0, // Border width in pixels (can either be 1 or 0) + inputColourFormat, // Input image format (i.e. GL_RGB, GL_RGBA, GL_BGR etc.) + GL_UNSIGNED_BYTE, // Image data type + buffer); // The actual image data itself + + // If we're using mipmaps then generate them. 
Note: This requires OpenGL 3.0 or higher + if (minFilter == GL_LINEAR_MIPMAP_LINEAR || + minFilter == GL_LINEAR_MIPMAP_NEAREST || + minFilter == GL_NEAREST_MIPMAP_LINEAR || + minFilter == GL_NEAREST_MIPMAP_NEAREST) + { + // printf("Will Generate MinMap \n"); + // std::cout << "Will Generate minmap" << std::endl; + glGenerateMipmap(GL_TEXTURE_2D); + } +} + + +// void +// draw_frames(void) +// { +// unsigned char* buffer = (unsigned char*) malloc(640 * 480 * 3); +// // Fill the buffer with green color +// for (int i = 0; i < 640 * 480 * 3; i += 3) { +// buffer[i] = 0; // Blue channel +// buffer[i + 1] = 255; // Green channel +// buffer[i + 2] = 0; // Red channel +// } +// int window_height = 480; +// int window_width = 640; + +// static float angle = 0.0f; + +// // Rotate slightly: +// angle += 0.00f; + +// // Setup rotation matrix: +// mat_rotate(matrix, rot.x, rot.y, rot.z, angle); + +// // cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 255, 0)); +// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); +// glMatrixMode(GL_MODELVIEW); // Operate on model-view matrix + +// glEnable(GL_TEXTURE_2D); +// GLuint image_tex = matToTexture(buffer, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP); +// glBegin(GL_QUADS); +// glTexCoord2i(0, 0); glVertex2i(0, 0); +// glTexCoord2i(0, 1); glVertex2i(0, window_height); +// glTexCoord2i(1, 1); glVertex2i(window_width, window_height); +// glTexCoord2i(1, 0); glVertex2i(window_width, 0); +// glEnd(); + +// glDeleteTextures(1, &image_tex); +// glDisable(GL_TEXTURE_2D); +// } +int global_count = 0; + +void +draw_frames(void) +{ + // unsigned char* buffer = (unsigned char*) malloc(640 * 480 * 3); + // // Fill the buffer with green color + + // if(global_count % 2 == 0) + // { + // for (int i = 0; i < 640 * 480 * 3; i += 3) { + // buffer[i] = 0; // Blue channel + // buffer[i + 1] = 255; // Green channel + // buffer[i + 2] = 0; // Red channel + // } + // } + // else + // { + // for (int i = 0; i < 640 * 480 * 3; i += 3) { + // buffer[i] = 
255; // Blue channel + // buffer[i + 1] = 0; // Green channel + // buffer[i + 2] = 0; // Red channel + // } + + // } + unsigned char *buffer = (unsigned char *)malloc(640 * 480 * 3); + // Fill the buffer with green color + + if (global_count % 2 == 0) + { + for (int i = 0; i < 640 * 480 * 3; i += 3) + { + buffer[i] = 0; // Blue channel + buffer[i + 1] = 255; // Green channel + buffer[i + 2] = 0; // Red channel + } + } + else + { + for (int i = 0; i < 640 * 480 * 3; i += 3) + { + buffer[i] = 255; // Blue channel + buffer[i + 1] = 0; // Green channel + buffer[i + 2] = 0; // Red channel + } + } + + + int window_height = 480; + int window_width = 640; + + static float angle = 0.0f; + + // Rotate slightly: + angle += 0.00f; + + // Setup rotation matrix: + mat_rotate(matrix, rot.x, rot.y, rot.z, angle); + + // GLuint image_tex = matToTexture(buffer, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP); + + // Don't clip against background: + glClear(GL_DEPTH_BUFFER_BIT); + + // Draw all the triangles in the buffer: + glBindVertexArray(vao); + glDrawArrays(GL_TRIANGLES, 0, 12 * 3); + global_count++; +} + +void drawCameraFrame(void* frameData, int width, int height){ + + static float angle = 0.0f; + + mat_rotate(matrix, rot.x, rot.y, rot.z, angle); + + matToTexture((unsigned char*)frameData, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP_TO_BORDER, width, height); + + // Don't clip against background: + glClear(GL_DEPTH_BUFFER_BIT); + + // Draw all the triangles in the buffer: + glBindVertexArray(vao); + glDrawArrays(GL_TRIANGLES, 0, 12 * 3); +} + +void +model_draw (void) +{ + static float angle = 0.0f; + + // Rotate slightly: + angle += 0.00f; + + // Setup rotation matrix: + mat_rotate(matrix, rot.x, rot.y, rot.z, angle); + + // Use our own shaders: + // Main program for rendering + program_cube_use(); + + // Don't clip against background: + glClear(GL_DEPTH_BUFFER_BIT); + + // Draw all the triangles in the buffer: + glBindVertexArray(vao); + glDrawArrays(GL_TRIANGLES, 0, 12 * 3); 
+} + +const float * +model_matrix (void) +{ + return matrix; +} + +void +model_pan_start (int x, int y) +{ + pan.x = x; + pan.y = y; +} + +void +model_pan_move (int x, int y) +{ + int dx = pan.x - x; + int dy = pan.y - y; + + // Rotation vector is perpendicular to (dx, dy): + rot.x = dy; + rot.y = -dx; +} diff --git a/base/src/Program.cpp b/base/src/Program.cpp new file mode 100644 index 000000000..0d175a0b1 --- /dev/null +++ b/base/src/Program.cpp @@ -0,0 +1,315 @@ +#include +#include +#include +#include +#include +//yash change +// #include +#ifndef GL_H +#define GL_H +#include +#include +// #include +// #include +#endif +#include +#include "Model.h" +#include "View.h" +#include "Program.h" +#include "GLUtils.h" + + +const GLchar *CUBE_VERTEX_SOURCE = +"#version 330\n" +"uniform mat4 view_matrix;\n" +"uniform mat4 model_matrix;\n" +"in vec3 vertex;\n" +"in vec3 vcolor;\n" +"in vec3 normal;\n" +"out vec3 fcolor;\n" +"out vec3 fpos;\n" +"out float fdot;\n" +"void main (void)\n" +"{\n" +" vec4 modelspace = model_matrix * vec4(vertex, 1.0);\n" +" gl_Position = view_matrix * modelspace;\n" +" fcolor = vcolor;\n" +" vec4 sight = vec4(0, 0, -1.0, 0.0);\n" +" vec4 wnormal = model_matrix * vec4(normal, 0.0);\n" +" fdot = dot(sight, wnormal);\n" +" fpos = modelspace.xyz;\n" +"}\n"; + + +const GLchar *CUBE_FRAGMENT_SOURCE = +"#version 330\n" +"in vec3 fcolor;\n" +"in vec3 fpos;\n" +"in float fdot;\n" +"out vec4 fragcolor;\n" +"void main (void)\n" +"{\n" +" if (!gl_FrontFacing)\n" +" return;\n" +" vec3 linear = pow(fcolor, vec3(1.0 / 2.2));\n" +" float dst = distance(vec3(0, 0, 2), fpos) * 0.4;\n" +" vec3 scaled = linear * vec3(fdot * dst);\n" +" fragcolor = vec4(pow(scaled, vec3(2.2)), 0.0);\n" +"}\n"; + +const GLchar *BKGD_VERTEX_SOURCE = +"#version 150\n" +"in vec2 vertex;\n" +"in vec2 texture;\n" +"out vec2 ftex;\n" +"void main (void)\n" +"{\n" +" ftex = vec2(texture.x, 1.0 - texture.y);\n" +" gl_Position = vec4(vertex, 0.5, 1.0);\n" +"}\n"; +// " ftex = texture;\n" +// 
const GLchar *BKGD_VERTEX_SOURCE = +// "#version 150\n" +// "in vec2 vertex;\n" +// "in vec2 texture;\n" +// "out vec2 ftex;\n" +// "uniform mat4 view_matrix;\n" // Include any necessary matrices +// "uniform mat4 model_matrix;\n" +// "void main (void)\n" +// "{\n" +// " ftex = (model_matrix * vec4(texture, 0.0, 1.0)).xy;\n" +// " gl_Position = view_matrix * vec4(vertex, 0.5, 1.0);\n" +// "}\n"; + + +const GLchar *BKGD_FRAGMENT_SOURCE = +"#version 150\n" +"uniform sampler2D tex;\n" +"in vec2 ftex;\n" +"out vec4 fragcolor;\n" +"void main (void)\n" +"{\n" +" fragcolor = texture(tex, ftex);\n" +"};\n"; + + + +// Shader structure: +struct shader { + const uint8_t *buf; + const uint8_t *end; + GLuint id; +}; + +// Location definitions: +enum loc_type { + UNIFORM, + ATTRIBUTE, +}; + +struct loc { + const char *name; + enum loc_type type; + GLint id; +}; + +static struct loc loc_bkgd[] = { + [LOC_BKGD_VERTEX] = { "vertex", ATTRIBUTE }, + [LOC_BKGD_TEXTURE] = { "texture", ATTRIBUTE }, +}; + +static struct loc loc_cube[] = { + [LOC_CUBE_VIEW] = { "view_matrix", UNIFORM }, + [LOC_CUBE_MODEL] = { "model_matrix", UNIFORM }, + [LOC_CUBE_VERTEX] = { "vertex", ATTRIBUTE }, + [LOC_CUBE_VCOLOR] = { "vcolor", ATTRIBUTE }, + [LOC_CUBE_NORMAL] = { "normal", ATTRIBUTE }, +}; + +// Programs: +enum { + BKGD, + CUBE, +}; + +struct program { + struct { + struct shader vert; + struct shader frag; + } shader; + struct loc *loc; + size_t nloc; + GLuint id; +}; + +static program programs[2] = { + { + { + { + (const uint8_t *)BKGD_VERTEX_SOURCE, + (const uint8_t *)BKGD_VERTEX_SOURCE + strlen(BKGD_VERTEX_SOURCE) + }, + { + (const uint8_t *)BKGD_FRAGMENT_SOURCE, + (const uint8_t *)BKGD_FRAGMENT_SOURCE + strlen(BKGD_FRAGMENT_SOURCE) + } + }, + loc_bkgd, + NELEM(loc_bkgd), + 0 + }, + { + { + { + (const uint8_t *)CUBE_VERTEX_SOURCE, + (const uint8_t *)CUBE_VERTEX_SOURCE + strlen(CUBE_VERTEX_SOURCE) + }, + { + (const uint8_t *)CUBE_FRAGMENT_SOURCE, + (const uint8_t *)CUBE_FRAGMENT_SOURCE + 
strlen(CUBE_FRAGMENT_SOURCE) + } + }, + loc_cube, + NELEM(loc_cube), + 1 + } +}; + +static void +check_compile (GLuint shader) +{ + GLint length; + + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length); + + if (length <= 1) + return; + + GLchar *log = (GLchar *)calloc(length, sizeof(GLchar)); + glGetShaderInfoLog(shader, length, NULL, log); + fprintf(stderr, "glCompileShader failed:\n%s\n", log); + free(log); +} + +static void +check_link (GLuint program) +{ + GLint status, length; + + glGetProgramiv(program, GL_LINK_STATUS, &status); + if (status != GL_FALSE) + return; + + glGetProgramiv(program, GL_INFO_LOG_LENGTH, &length); + GLchar *log = (GLchar *)calloc(length, sizeof(GLchar)); + glGetProgramInfoLog(program, length, NULL, log); + fprintf(stderr, "glLinkProgram failed: %s\n", log); + free(log); +} + +static void +create_shader (struct shader *shader, GLenum type) +{ + auto x = glGetString(GL_SHADING_LANGUAGE_VERSION); + GLenum err = glewInit(); + if (err != GLEW_OK) + { + std::cout << "GLEW IS NOT OK" << std::endl; + exit(1); // or handle the error in a nicer way + } + if (!GLEW_VERSION_2_1) // check that the machine supports the 2.1 API. 
+ { + std::cout << "GLEW VERSION NOT SUPPORTED " << std::endl; + exit(1); // or handle the error in a nicer way + + } + const GLchar *buf = (const GLchar *) shader->buf; + GLint len = shader->end - shader->buf; + if (type == GL_FRAGMENT_SHADER){ + std::cout << "FRAGMENT _SHADERS " << std::endl; + shader->id = glCreateShader(GL_FRAGMENT_SHADER); + } + else + { + std::cout << "VERTEX_SHADERS "<< GL_VERTEX_SHADER << std::endl; + shader->id = glCreateShader(GL_VERTEX_SHADER); + } + + glShaderSource(shader->id, 1, &buf, &len); + glCompileShader(shader->id); + + check_compile(shader->id); +} + +static void +program_init (struct program *p) +{ + struct shader *vert = &p->shader.vert; + struct shader *frag = &p->shader.frag; + + create_shader(vert, GL_VERTEX_SHADER); + create_shader(frag, GL_FRAGMENT_SHADER); + + p->id = glCreateProgram(); + + glAttachShader(p->id, vert->id); + glAttachShader(p->id, frag->id); + + glLinkProgram(p->id); + check_link(p->id); + + glDetachShader(p->id, vert->id); + glDetachShader(p->id, frag->id); + + glDeleteShader(vert->id); + glDeleteShader(frag->id); + + FOREACH_NELEM (p->loc, p->nloc, l) { + switch (l->type) + { + case UNIFORM: + l->id = glGetUniformLocation(p->id, l->name); + break; + + case ATTRIBUTE: + l->id = glGetAttribLocation(p->id, l->name); + break; + } + } +} + +void +programs_init (void) +{ + FOREACH (programs, p) + program_init(p); +} + +void +program_cube_use (void) +{ + glUseProgram(programs[CUBE].id); + + glUniformMatrix4fv(loc_cube[LOC_CUBE_VIEW ].id, 1, GL_FALSE, view_matrix()); + glUniformMatrix4fv(loc_cube[LOC_CUBE_MODEL].id, 1, GL_FALSE, model_matrix()); +} + +void +program_bkgd_use (void) +{ + glUseProgram(programs[BKGD].id); + + glUniform1i(glGetUniformLocation(programs[BKGD].id, "tex"), 0); +} + +GLint +program_bkgd_loc (const enum LocBkgd index) +{ + return loc_bkgd[index].id; +} + +GLint +program_cube_loc (const enum LocCube index) +{ + return loc_cube[index].id; +} diff --git a/base/src/RTSPClientSrc.cpp 
b/base/src/RTSPClientSrc.cpp index c88e4afad..e582ce5f3 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -111,6 +111,21 @@ class RTSPClientSrc::Detail bConnected = true; return bConnected; } + + frame_sp prependSpsPpsToFrame(std::string id) + { + auto spsPpsData = pFormatCtx->streams[0]->codec->extradata; + auto spsPpsSize = pFormatCtx->streams[0]->codec->extradata_size; + size_t totalFrameSize = packet.size + spsPpsSize; + + auto frm = myModule->makeFrame(totalFrameSize, id); + uint8_t* frameData = static_cast(frm->data()); + memcpy(frameData, spsPpsData, spsPpsSize); + frameData += spsPpsSize; + memcpy(frameData, packet.data, packet.size); + return frm; + } + bool readBuffer() { frame_container outFrames; @@ -142,15 +157,11 @@ class RTSPClientSrc::Detail H264Utils::getNALUnit((const char*)packet.data, packet.size, offset); packet.data += offset - 4; packet.size -= offset - 4; - auto spsPpsData = pFormatCtx->streams[0]->codec->extradata; - auto spsPpsSize = pFormatCtx->streams[0]->codec->extradata_size;; - size_t totalFrameSize = packet.size + spsPpsSize; - - frm = myModule->makeFrame(totalFrameSize, it->second); - uint8_t* frameData = static_cast(frm->data()); - memcpy(frameData, spsPpsData, spsPpsSize); - frameData += spsPpsSize; - memcpy(frameData, packet.data, packet.size); + frm = prependSpsPpsToFrame(it->second); + } + else if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE) + { + frm = prependSpsPpsToFrame(it->second); } else { diff --git a/base/src/View.cpp b/base/src/View.cpp new file mode 100644 index 000000000..5534e7751 --- /dev/null +++ b/base/src/View.cpp @@ -0,0 +1,72 @@ +#include "Matrix.h" + +// static struct { +// float matrix[16]; +// float width; +// float height; +// float z; +// } +// state = { +// .z = 2.0f, +// }; + +struct State { + float matrix[16]; + float width; + float height; + float z; +} state { + {0}, // Initialize all elements of matrix to 0 + 0, // Initialize width to 0 + 0, // Initialize height to 0 + 
2.0f // Initialize z to 2.0f +}; + +const float * +view_matrix (void) +{ + return state.matrix; +} + +static void +view_recalc (void) +{ + float aspect_ratio = state.width / state.height; + float matrix_frustum[16]; + float matrix_translate[16]; + + // Create frustum matrix: + mat_frustum(matrix_frustum, 0.7, aspect_ratio, 0.5, 6); + + // Create frustum translation matrix: + mat_translate(matrix_translate, 0, 0, state.z); + + // Combine into perspective matrix: + mat_multiply(state.matrix, matrix_frustum, matrix_translate); +} + +void +view_set_window (int width, int height) +{ + state.width = width; + state.height = height; + view_recalc(); +} + +void +view_z_decrease (void) +{ + if (state.z > 1.5f) { + state.z -= 0.1f; + view_recalc(); + } +} + +void +view_z_increase (void) +{ + if (state.z < 5.0f) { + state.z += 0.1f; + view_recalc(); + } +} diff --git a/base/test/gtkglrenderer_tests.cpp b/base/test/gtkglrenderer_tests.cpp new file mode 100644 index 000000000..86fa99343 --- /dev/null +++ b/base/test/gtkglrenderer_tests.cpp @@ -0,0 +1,316 @@ +#include + +#include "PipeLine.h" +#include "NvV4L2Camera.h" +#include "NvTransform.h" +#include "VirtualCameraSink.h" +#include "FileWriterModule.h" +#include "DMAFDToHostCopy.h" +#include "StatSink.h" +#include "ResizeNPPI.h" +#include "AffineTransform.h" +#include "H264Decoder.h" +#include "CudaMemCopy.h" +#include "H264Metadata.h" +#include "RTSPClientSrc.h" +#include "EglRenderer.h" +#include "GtkGlRenderer.h" +#include "FileWriterModule.h" +#include "NvArgusCamera.h" +#include "MemTypeConversion.h" +#include +// // #include +// #include +// #define PRIMARY_WINDOW_WIDTH 1920 +// #define PRIMARY_WINDOW_HEIGHT 1080 + +// #define ASSETS_PATH "assets_ui/" +// #define GLADE_PATH ASSETS_PATH "ui/" +// #define STYLE_PATH ASSETS_PATH "css/" +// #define CONFIG_PATH "config/" + +PipeLine p("test"); +GtkWidget *glarea; +GtkWidget *glarea2; +GtkWidget *glarea3; +GtkWidget *glarea4; +GtkWidget *glAreaSwitch; 
+BOOST_AUTO_TEST_SUITE(gtkglrenderer_tests) + +struct rtsp_client_tests_data { + string outFile; + string empty; +}; + +boost::shared_ptrGtkGl; + +BOOST_AUTO_TEST_CASE(basic, *boost::unit_test::disabled()) +{ + + // Logger::setLogLevel(boost::log::trivial::severity_level::info); + + // auto source = boost::shared_ptr(new NvV4L2Camera(NvV4L2CameraProps(640, 480, 2))); + + // GtkGlRendererProps gtkglsinkProps("atlui.glade", 1920, 1080); + + // auto sink = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); + // source->setNext(sink); + + // PipeLine p("test"); + // p.appendModule(source); + // BOOST_TEST(p.init()); + + // p.run_all_threaded(); + // boost::this_thread::sleep_for(boost::chrono::seconds(10000000000)); + // gtk_main(); + // p.stop(); + // p.term(); + // p.wait_for_all(); +} +void lauchAtlPipeline() +{ + Logger::setLogLevel(boost::log::trivial::severity_level::info); + + auto source = boost::shared_ptr(new NvV4L2Camera(NvV4L2CameraProps(640, 360, 10))); + + auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + source->setNext(transform); + + // GtkGlRendererProps gtkglsinkProps(glarea, 1280, 720); + // auto sink = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); + // transform->setNext(sink); + + // GtkGlRendererProps gtkglsinkProps2(glarea2, 1280, 720); + // auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); + // transform->setNext(sink2); + + // GtkGlRendererProps gtkglsinkProps3(glarea3, 1280, 720); + // auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); + // transform->setNext(sink3); + + // GtkGlRendererProps gtkglsinkProps4(glarea4, 1280, 720); + // auto sink4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); + // transform->setNext(sink4); + + auto sink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0))); + transform->setNext(sink); + + p.appendModule(source); + p.init(); + Logger::setLogLevel(boost::log::trivial::severity_level::info); + 
p.run_all_threaded(); +} +boost::shared_ptr launchPipeline() +{ + rtsp_client_tests_data d; + string url = "rtsp://root:m4m1g0@10.102.10.77/axis-media/media.amp"; + + //RTSP + RTSPClientSrcProps rtspProps = RTSPClientSrcProps(url, d.empty, d.empty); + auto source = boost::shared_ptr(new RTSPClientSrc(rtspProps)); + auto meta = framemetadata_sp(new H264Metadata()); + source->addOutputPin(meta); + + //H264DECODER + H264DecoderProps decoder_1_Props = H264DecoderProps(); + auto decoder_1 = boost::shared_ptr(new H264Decoder(decoder_1_Props)); + source->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source->setNext(decoder_1); + + //NV-TRANSFORM + auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_1->setNext(transform); + + //MEMCONVERT TO DEVICE + auto stream = cudastream_sp(new ApraCudaStream); + auto memconversion1 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream))); + transform->setNext(memconversion1); + + //RESIZE-NPPI + auto resizenppi = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream))); + memconversion1->setNext(resizenppi); + + //MEMCONVERT TO DMA + auto memconversion2 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream))); + resizenppi->setNext(memconversion2); + + GtkGlRendererProps gtkglsinkProps(glarea, 640, 360); + GtkGl = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); + memconversion2->setNext(GtkGl); + + //auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); + // memconversion2->setNext(eglsink); + + // GtkGlRendererProps gtkglsinkProps2(glarea2, 1024, 1024); + // auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); + // memconversion2->setNext(sink2); + + // GtkGlRendererProps gtkglsinkProps3(glarea3, 1024, 1024); + // auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); + // memconversion2->setNext(sink3); + + // 
GtkGlRendererProps gtkglsinkProps4(glarea4, 1024, 1024); + // auto sink4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); + // memconversion2->setNext(sink4); + + // auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); + // decoder_1->setNext(eglsink); + + p.appendModule(source); + p.init(); + Logger::setLogLevel(boost::log::trivial::severity_level::info); + p.run_all_threaded(); + + return GtkGl; +; + +} + +void launchPipelineRTSP() +{ + rtsp_client_tests_data d; + string url = "rtsp://root:m4m1g0@10.102.10.77/axis-media/media.amp"; + RTSPClientSrcProps rtspProps = RTSPClientSrcProps(url, d.empty, d.empty); + auto source = boost::shared_ptr(new RTSPClientSrc(rtspProps)); + auto meta = framemetadata_sp(new H264Metadata()); + source->addOutputPin(meta); + + H264DecoderProps decoder_1_Props = H264DecoderProps(); + auto decoder_1 = boost::shared_ptr(new H264Decoder(decoder_1_Props)); + source->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source->setNext(decoder_1); + + + + auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_1->setNext(transform); + + // auto stream = cudastream_sp(new ApraCudaStream); + // auto copy1 = boost::shared_ptr(new CudaMemCopy(CudaMemCopyProps(cudaMemcpyHostToDevice, stream))); + // transform->setNext(copy1); + + // auto m2 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream))); + // copy1->setNext(m2); + // auto copy2 = boost::shared_ptr(new CudaMemCopy(CudaMemCopyProps(cudaMemcpyDeviceToHost, stream))); + // m2->setNext(copy2); + // auto outputPinId = copy2->getAllOutputPinsByType(FrameMetadata::RAW_IMAGE)[0]; + + + GtkGlRendererProps gtkglsinkProps(glarea, 1280, 720); + auto sink = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); + transform->setNext(sink); + + GtkGlRendererProps gtkglsinkProps2(glarea2, 1280, 720); + auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); + transform->setNext(sink2); + + 
GtkGlRendererProps gtkglsinkProps3(glarea3, 1280, 720); + auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); + transform->setNext(sink3); + + GtkGlRendererProps gtkglsinkProps4(glarea4, 1280, 720); + auto sink4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); + transform->setNext(sink4); + + // auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); + // decoder_1->setNext(eglsink); + + p.appendModule(source); + p.init(); + Logger::setLogLevel(boost::log::trivial::severity_level::info); + p.run_all_threaded(); + +} + + +void screenChanged(GtkWidget *widget, GdkScreen *old_screen, + gpointer userdata) +{ + /* To check if the display supports alpha channels, get the visual */ + GdkScreen *screen = gtk_widget_get_screen(widget); + GdkVisual *visual = gdk_screen_get_rgba_visual(screen); + if (!visual) + { + printf("Your screen does not support alpha channels!\n"); + visual = gdk_screen_get_system_visual(screen); + } + else + { + printf("Your screen supports alpha channels!\n"); + } + gtk_widget_set_visual(widget, visual); +} + +void my_getsize(GtkWidget *widget, GtkAllocation *allocation, void *data) { + printf("width = %d, height = %d\n", allocation->width, allocation->height); +} + +static gboolean hide_gl_area(gpointer data) { + gtk_widget_hide(glarea); + gtk_widget_hide(glAreaSwitch); + return G_SOURCE_REMOVE; // Remove the timeout source after execution +} + +static gboolean change_gl_area(gpointer data) { + GtkGl->changeProps(glAreaSwitch, 640, 360); + GtkGl->step(); + return G_SOURCE_REMOVE; // Change the glarea before showing +} + +static gboolean show_gl_area(gpointer data) { + //gtk_widget_show(glarea); + gtk_widget_show(glAreaSwitch); + return G_SOURCE_REMOVE; // Remove the timeout source after execution +} + + +BOOST_AUTO_TEST_CASE(windowInit2, *boost::unit_test::disabled()) +{ + if (!gtk_init_check(NULL, NULL)) // yash argc argv + { + fputs("Could not initialize GTK", stderr); + } + GtkBuilder *m_builder = 
gtk_builder_new(); + if (!m_builder) + { + LOG_ERROR << "Builder not found"; + } + gtk_builder_add_from_file(m_builder, "/mnt/disks/ssd/vinayak/backup/GtkRendererModule/ApraPipes/assets/appui.glade", NULL); + std::cout << "ui glade found" << std::endl; + + GtkWidget *window = GTK_WIDGET(gtk_window_new(GTK_WINDOW_TOPLEVEL)); + g_object_ref(window); + gtk_window_set_default_size(GTK_WINDOW(window), 1280, 400); + gtk_window_set_resizable(GTK_WINDOW(window), FALSE); + gtk_widget_set_app_paintable(window, TRUE); + + do + { + gtk_main_iteration(); + } while (gtk_events_pending()); + + GtkWidget *mainFixed = GTK_WIDGET(gtk_builder_get_object(m_builder, "mainWidget")); + gtk_container_add(GTK_CONTAINER(window), mainFixed); + glarea = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw")); + + glAreaSwitch = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw2")); + // glarea2 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw2")); + // glarea3 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw3")); + // glarea4 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw4")); + g_signal_connect(window, "destroy", G_CALLBACK(gtk_main_quit), NULL); + //g_signal_connect(glarea, "size-allocate", G_CALLBACK(my_getsize), NULL); + launchPipeline(); + gtk_widget_show_all(window); + g_timeout_add(5000, hide_gl_area, NULL); + g_timeout_add(7000, change_gl_area, NULL); + g_timeout_add(9000, show_gl_area, NULL); + gtk_main(); + + p.stop(); + p.term(); + p.wait_for_all(); +} + + + +BOOST_AUTO_TEST_SUITE_END() From 19efcd0aa2edf54a5048728b2ff40d7486f4f7e4 Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Tue, 30 Jan 2024 11:44:23 +0530 Subject: [PATCH 14/19] GTLGL working perfectly * --- base/include/GtkGlRenderer.h | 4 +- base/include/Model.h | 4 - base/src/Background.cpp | 18 +- base/src/GtkGlRenderer.cpp | 155 ++++++++-- base/src/Model.cpp | 458 ++++-------------------------- base/src/OrderedCacheOfFiles.cpp | 2 +- base/src/Program.cpp | 98 +------ 
base/test/gtkglrenderer_tests.cpp | 442 +++++++++++++++++++++------- 8 files changed, 534 insertions(+), 647 deletions(-) diff --git a/base/include/GtkGlRenderer.h b/base/include/GtkGlRenderer.h index 87960b16b..4c0565d6d 100644 --- a/base/include/GtkGlRenderer.h +++ b/base/include/GtkGlRenderer.h @@ -14,7 +14,8 @@ class GtkGlRendererProps : public ModuleProps windowHeight = _windowHeight; } GtkWidget* glArea; - int windowWidth, windowHeight; + int windowWidth = 0; + int windowHeight = 0; }; class GtkGlRenderer : public Module @@ -33,6 +34,7 @@ class GtkGlRenderer : public Module bool processSOS(frame_sp &frame); bool validateInputPins(); bool shouldTriggerSOS(); + bool handleCommand(Command::CommandType type, frame_sp &frame); private: class Detail; boost::shared_ptr mDetail; diff --git a/base/include/Model.h b/base/include/Model.h index 55d23b67b..d7de42bd7 100644 --- a/base/include/Model.h +++ b/base/include/Model.h @@ -1,7 +1,3 @@ void model_init (void); -void model_draw (void); -void draw_frames(void); void drawCameraFrame(void* frameData, int width, int height); const float *model_matrix(void); -void model_pan_start (int x, int y); -void model_pan_move (int x, int y); diff --git a/base/src/Background.cpp b/base/src/Background.cpp index 0f01be471..438141154 100644 --- a/base/src/Background.cpp +++ b/base/src/Background.cpp @@ -74,24 +74,14 @@ background_draw (void) 0, 2, 3 }; - static GLubyte triangle2[] = { - 4, 5, 6, - 4, 6, 7 - }; - program_bkgd_use(); - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, texture); + // glActiveTexture(GL_TEXTURE0); + // glBindTexture(GL_TEXTURE_2D, texture); glBindVertexArray(vao); - //yash change - // glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, index); + // //yash change + // // glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, index); glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, triangle1); - - // Draw the second triangle - glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, triangle2); - //yash change 
end - glBindVertexArray(0); } void diff --git a/base/src/GtkGlRenderer.cpp b/base/src/GtkGlRenderer.cpp index 33042191d..deb119a54 100644 --- a/base/src/GtkGlRenderer.cpp +++ b/base/src/GtkGlRenderer.cpp @@ -36,6 +36,7 @@ class GtkGlRenderer::Detail static void on_resize(GtkGLArea *area, gint width, gint height, gpointer data) { + LOG_ERROR << "this pointer in resize is " << data; printf("In resize width = %d, height = %d\n", width, height); view_set_window(width, height); background_set_window(width, height); @@ -47,15 +48,50 @@ class GtkGlRenderer::Detail static gboolean on_render(GtkGLArea *glarea, GdkGLContext *context, gpointer data) { - // Clear canvas: + //LOG_ERROR<<"DATA IN RENDER "<mProps.windowWidth<<" "<mProps.windowWidth == 2) + // { + // size_t bufferSize = static_cast(640) * 360 * 3; + // memset(glarea, 0, bufferSize); + // for (size_t i = 1; i < bufferSize; i += 3) { + // glarea[i] = 150; + // } + // } + + // Clear canvas: + + + //LOG_ERROR << "Window width in on_render is " <mProps.windowWidth; + // LOG_DEBUG << "Coming Inside Renderer"; + //LOG_ERROR << "GTKGL POINTER IS===========================>>>>"<< detailInstance->mProps.windowWidth<<" "<< glarea; if (detailInstance->isMetadataSet == false) { - LOG_INFO << "Metadata is Not Set "; + LOG_TRACE << "Metadata is Not Set "; return TRUE; } - + gint x, y; + + // Check if the child widget is realized (has an associated window) + if (gtk_widget_get_realized(GTK_WIDGET(glarea))) { + // Get the immediate parent of the child + GtkWidget *parent = gtk_widget_get_parent(GTK_WIDGET(glarea)); + + // Check if the parent is realized + if (parent && gtk_widget_get_realized(parent)) { + // Get the position of the child relative to its parent + gtk_widget_translate_coordinates(GTK_WIDGET(glarea), parent, 0, 0, &x, &y); + // g_print("Child position relative to parent: x=%d, y=%d\n", x, y); + //LOG_ERROR << "Child position relative to parent "<< x << "=====" << y << "==============" << 
detailInstance->mProps.windowWidth ; + } else { + // g_print("Error: Child's parent is not realized.\n"); + } + } else { + // g_print("Error: Child widget is not realized.\n"); + } if (!detailInstance->cachedFrame.get()) { LOG_ERROR << "Got Empty Frame"; @@ -90,7 +126,19 @@ class GtkGlRenderer::Detail static gboolean on_realize(GtkGLArea *glarea, GdkGLContext *context, gpointer data) // Process SOS { + //getting current time + std::chrono::time_point t = std::chrono::system_clock::now(); + auto dur = std::chrono::duration_cast(t.time_since_epoch()); + auto timeStamp = dur.count(); + auto diff = timeStamp - 1705559852000; + LOG_ERROR<<"On realize is called "; + LOG_ERROR<<"Time difference is "<mProps.windowWidth; return TRUE; } @@ -150,24 +199,25 @@ class GtkGlRenderer::Detail // } - // void on_unrealize() - // { - // GdkGLContext *glcontext = gtk_gl_area_get_context(GTK_GL_AREA(glarea)); - // GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); - // GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); + static void on_unrealize(GtkGLArea *glarea, gint width, gint height, gpointer data) + { + LOG_ERROR << "UNREALIZE SIGNAL==================================>>>>>>>>>>>>>>>>>"; + // GdkGLContext *glcontext = gtk_gl_area_get_context(GTK_GL_AREA(glarea)); + // GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); + // GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); - // // Disconnect the update signal from frame_clock - // //g_signal_handlers_disconnect_by_func(frame_clock, G_CALLBACK(gtk_gl_area_queue_render), G_OBJECT(glarea)); + // // Disconnect the update signal from frame_clock + // g_signal_handlers_disconnect_by_func(frame_clock, gtk_gl_area_queue_render, G_OBJECT(glarea)); - // // Get the parent container - // GtkWidget *parent_container = gtk_widget_get_parent(glarea); + // // // Get the parent container + // GtkWidget *parent_container = gtk_widget_get_parent(glarea); - // // Remove the GtkGLArea from its parent 
container - // gtk_container_remove(GTK_CONTAINER(parent_container), glarea); + // // Remove the GtkGLArea from its parent container + // gtk_container_remove(GTK_CONTAINER(parent_container), glarea); - // // Destroy the GtkGLArea widget - // gtk_widget_destroy(glarea); - // } + // // Destroy the GtkGLArea widget + // gtk_widget_destroy(glarea); + } static gboolean on_scroll(GtkWidget *widget, GdkEventScroll *event, gpointer data) @@ -215,13 +265,43 @@ class GtkGlRenderer::Detail // {"resize", G_CALLBACK(on_resize), (GdkEventMask)0}, // {"scroll-event", G_CALLBACK(on_scroll), GDK_SCROLL_MASK}, //connect_signals(glarea, signals, NELEM(signals)); - g_signal_connect(glarea, "realize", G_CALLBACK(on_realize), this); - g_signal_connect(glarea, "render", G_CALLBACK(on_render), this); - g_signal_connect(glarea, "resize", G_CALLBACK(on_resize), this); + + + std::chrono::time_point t = std::chrono::system_clock::now(); + auto dur = std::chrono::duration_cast(t.time_since_epoch()); + auto timeStamp = dur.count(); + renderId = g_signal_connect(glarea, "render", G_CALLBACK(on_render), this); + realizeId = g_signal_connect(glarea, "realize", G_CALLBACK(on_realize), this); + resizeId = g_signal_connect(glarea, "resize", G_CALLBACK(on_resize), this); + LOG_ERROR<<"Connect to renderId "<>>>>>>"; + // // g_signal_handler_disconnect(glarea, realizeId); + // // g_signal_handler_disconnect(glarea, renderId); + // // g_signal_handler_disconnect(glarea, resizeId); + // } + + void disconnect_glarea_signals(GtkWidget *glarea) + { + // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_realize), this); + // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_render), this); + // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_resize), this); + LOG_ERROR << "disconnect_glarea_signals===================================>>>>>>>"; + g_signal_handler_disconnect(glarea, realizeId); + g_signal_handler_disconnect(glarea, renderId); + g_signal_handler_disconnect(glarea, 
resizeId); } bool init() { + LOG_ERROR << "MDETAIL GLAREA -> "<< glarea; connect_glarea_signals(glarea); // initialize_gl(GTK_GL_AREA(glarea)); return true; @@ -235,6 +315,9 @@ class GtkGlRenderer::Detail bool isDmaMem; bool isMetadataSet; GtkGlRendererProps mProps; + guint realizeId; + guint renderId; + guint resizeId; }; GtkGlRenderer::GtkGlRenderer(GtkGlRendererProps props) : Module(SINK, "GtkGlRenderer", props) @@ -243,6 +326,7 @@ GtkGlRenderer::GtkGlRenderer(GtkGlRendererProps props) : Module(SINK, "GtkGlRend mDetail->glarea = props.glArea; mDetail->windowWidth = props.windowWidth; mDetail->windowHeight = props.windowHeight; + //LOG_ERROR<<"i am creating gtkgl renderer width and height is "<mProps.windowWidth; } GtkGlRenderer::~GtkGlRenderer() {} @@ -264,9 +348,23 @@ bool GtkGlRenderer::init() bool GtkGlRenderer::process(frame_container &frames) { + auto myId = Module::getId(); // LOG_ERROR << "GOT " auto frame = frames.cbegin()->second; mDetail->cachedFrame = frame; + size_t underscorePos = myId.find('_'); + std::string numericPart = myId.substr(underscorePos + 1); + int myNumber = std::stoi(numericPart); + + if ((controlModule != nullptr) && (myNumber % 2 == 1)) + { + Rendertimestamp cmd; + auto myTime = frames.cbegin()->second->timestamp; + cmd.currentTimeStamp = myTime; + controlModule->queueCommand(cmd); + //LOG_ERROR << "myID is GtkGlRendererModule_ "<glarea; //mDetail->on_unrealize(); + mDetail->disconnect_glarea_signals(mDetail->glarea); mDetail->glarea = glArea; mDetail->windowWidth = windowWidth; mDetail->windowHeight = windowHeight; mDetail->init(); - LOG_ERROR << "After changing props ============"<glarea; + gtk_widget_show(glArea); } bool GtkGlRenderer::shouldTriggerSOS() @@ -319,11 +417,11 @@ bool GtkGlRenderer::shouldTriggerSOS() } bool GtkGlRenderer::processSOS(frame_sp &frame) -{ - LOG_INFO<<"I AM IN PROCESS-SOS !!!"; +{ + //mDetail->connect_glarea_signals(mDetail->glarea); auto inputMetadata = frame->getMetadata(); auto frameType = 
inputMetadata->getFrameType(); - LOG_INFO<<"GOT METADATA "<getFrameType(); + LOG_TRACE<<"GOT METADATA "<getFrameType(); int width = 0; int height = 0; @@ -376,4 +474,7 @@ bool GtkGlRenderer::processSOS(frame_sp &frame) return true; } - +bool GtkGlRenderer::handleCommand(Command::CommandType type, frame_sp &frame) +{ + return Module::handleCommand(type, frame); +} \ No newline at end of file diff --git a/base/src/Model.cpp b/base/src/Model.cpp index 68eba21b8..049204a29 100644 --- a/base/src/Model.cpp +++ b/base/src/Model.cpp @@ -7,6 +7,7 @@ // #include #ifndef GL_H #define GL_H +#include "fstream" #include #include #endif @@ -15,265 +16,34 @@ #include "Program.h" #include "GLUtils.h" -struct point { - float x; - float y; - float z; -} __attribute__((packed)); - -struct color { - float r; - float g; - float b; -} __attribute__((packed)); - -// Each vertex has position, normal and color: -struct vertex { - struct point pos; - struct point normal; - struct color color; -} __attribute__((packed)); - -// Each triangle has three vertices: -struct triangle { - struct vertex vert[3]; -} __attribute__((packed)); - -// Each corner point has a position and a color: -struct corner { - struct point pos; - struct color color; -} __attribute__((packed)); - -// Each face has a single normal, four corner points, -// and two triangles: -struct face { - struct corner corner[4]; - struct point normal; - struct triangle tri[2]; -} __attribute__((packed)); - -// Each cube has six faces: -struct cube { - struct face face[6]; -} __attribute__((packed)); - static GLuint vao, vbo; static float matrix[16] = { 0 }; -// Mouse movement: -static struct { - int x; - int y; -} pan; - -// Cube rotation axis: -static struct point rot = { - .x = 0.0f, - .y = 1.0f, - .z = 0.0f, -}; - -// Return the cross product of two vectors: -static void -cross (struct point *result, const struct point *a, const struct point *b) -{ - result->x = a->y * b->z - a->z * b->y; - result->y = a->z * b->x - a->x * b->z; - 
result->z = a->x * b->y - a->y * b->x; -} // Initialize the model: void model_init (void) { - // Define our cube: - // yash changes - // struct cube cube = - // { .face[0].corner = - // { { 0, 1, 0 } - // , { 1, 0, 0 } - // , { 0, 0, 0 } - // , { 1, 1, 0 } - // } - // , .face[1].corner = - // { { 0, 0, 0 } - // , { 1, 0, 1 } - // , { 0, 0, 1 } - // , { 1, 0, 0 } - // } - // , .face[2].corner = - // { { 1, 0, 0 } - // , { 1, 1, 1 } - // , { 1, 0, 1 } - // , { 1, 1, 0 } - // } - // , .face[3].corner = - // { { 1, 1, 0 } - // , { 0, 1, 1 } - // , { 1, 1, 1 } - // , { 0, 1, 0 } - // } - // , .face[4].corner = - // { { 0, 1, 0 } - // , { 0, 0, 1 } - // , { 0, 1, 1 } - // , { 0, 0, 0 } - // } - // , .face[5].corner = - // { { 0, 1, 1 } - // , { 1, 0, 1 } - // , { 1, 1, 1 } - // , { 0, 0, 1 } - // } - // } ; - - -struct cube cube; - cube.face[0].corner[0]={{ 0, 0, 0 },{0,0,0}}; // Bottom Face - cube.face[0].corner[1]={{ 0, 0, 0 },{0,0,0}}; - cube.face[0].corner[2]={{ 0, 0, 0 },{0,0,0}}; - cube.face[0].corner[3]={{ 0, 0, 0 },{0,0,0}}; - - cube.face[1].corner[0]={{ 0, 0, 0 },{0,0,0}}; // right face - cube.face[1].corner[1]={{ 0, 0, 0 },{0,0,0}}; - cube.face[1].corner[2]={{ 0, 0, 0 },{0,0,0}}; - cube.face[1].corner[3]={{ 0, 0, 0 },{0,0,0}}; - - cube.face[2].corner[0]={{ 0, 0, 0 },{0,0,0}}; - cube.face[2].corner[1]={{ 0, 0, 0 },{0,0,0}}; - cube.face[2].corner[2]={{ 0, 0, 0 },{0,0,0}}; - cube.face[2].corner[3]={{ 0, 0, 0 },{0,0,0}}; // top face - - cube.face[3].corner[0]={{ 0, 0, 0 },{0,0,0}}; - cube.face[3].corner[1]={{ 0, 0, 0 },{0,0,0}}; // left face - cube.face[3].corner[2]={{ 0, 0, 0 },{0,0,0}}; - cube.face[3].corner[3]={{ 0, 0, 0 },{0,0,0}}; - - cube.face[4].corner[0]={{ 0, 0, 0 },{0,0,0}}; - cube.face[4].corner[1]={{ 0, 0, 0 },{0,0,0}}; - cube.face[4].corner[2]={{ 0, 0, 0 },{0,0,0}}; // outside - cube.face[4].corner[3]={{ 0, 0, 0 },{0,0,0}}; - - cube.face[5].corner[0]={{ 0, 0, 0 },{0,0,0}}; - cube.face[5].corner[1]={{ 0, 0, 0 },{0,0,0}}; - cube.face[5].corner[2]={{ 0, 
0, 0 },{0,0,0}}; //inside - cube.face[5].corner[3]={{ 0, 0, 0 },{0,0,0}}; - - // Generate colors for each corner based on its position: - FOREACH (cube.face, face) { - FOREACH (face->corner, corner) { - corner->color.r = corner->pos.x * 0.8f + 0.1f; - corner->color.g = corner->pos.y * 0.8f + 0.1f; - corner->color.b = corner->pos.z * 0.8f + 0.1f; - } - } - - // Center cube on the origin by translating corner points: - FOREACH (cube.face, face) { - FOREACH (face->corner, corner) { - corner->pos.x -= 1.0f; - corner->pos.y -= 1.0f; - corner->pos.z -= 1.0f; - } - } - // Yash Change - // FOREACH (cube.face, face) { - // FOREACH (face->corner, corner) { - // corner->pos.x -= 0.5f; - // corner->pos.y -= 0.5f; - // corner->pos.z -= 0.5f; - // } - // } - - // Face normals are cross product of two ribs: - FOREACH (cube.face, face) { - - // First rib is (corner 3 - corner 0): - struct point a = { - .x = face->corner[3].pos.x - face->corner[0].pos.x, - .y = face->corner[3].pos.y - face->corner[0].pos.y, - .z = face->corner[3].pos.z - face->corner[0].pos.z, - }; - - // Second rib is (corner 2 - corner 0): - struct point b = { - .x = face->corner[2].pos.x - face->corner[0].pos.x, - .y = face->corner[2].pos.y - face->corner[0].pos.y, - .z = face->corner[2].pos.z - face->corner[0].pos.z, - }; - - // Face normal is cross product of these two ribs: - cross(&face->normal, &a, &b); - } - - // Create two triangles for each face: - FOREACH (cube.face, face) { - - // Corners to compose triangles of, chosen in - // such a way that both triangles rotate CCW: - int index[2][3] = { { 0, 2, 1 }, { 1, 3, 0 } }; - - for (int t = 0; t < 2; t++) { - for (int v = 0; v < 3; v++) { - int c = index[t][v]; - struct corner *corner = &face->corner[c]; - struct vertex *vertex = &face->tri[t].vert[v]; - - vertex->pos = corner->pos; - vertex->normal = face->normal; - vertex->color = corner->color; - } - } - } - - // Copy vertices into separate array for drawing: - struct vertex vertex[6 * 2 * 3]; - struct 
vertex *cur = vertex; - - FOREACH (cube.face, face) { - FOREACH (face->tri, tri) { - for (int v = 0; v < 3; v++) { - *cur++ = tri->vert[v]; - } - } - } - - // Generate empty buffer: - glGenBuffers(1, &vbo); - - // Generate empty vertex array object: - glGenVertexArrays(1, &vao); - - // Set as current vertex array: - glBindVertexArray(vao); - glBindBuffer(GL_ARRAY_BUFFER, vbo); - - // Add vertex, color and normal data to buffers: - struct { - enum LocCube loc; - const void *ptr; - } - map[] = { - { .loc = LOC_CUBE_VERTEX - , .ptr = (void *) offsetof(struct vertex, pos) - } , - { .loc = LOC_CUBE_VCOLOR - , .ptr = (void *) offsetof(struct vertex, color) - } , - { .loc = LOC_CUBE_NORMAL - , .ptr = (void *) offsetof(struct vertex, normal) - } , - }; - - FOREACH (map, m) { - GLint loc = program_cube_loc(m->loc); - glEnableVertexAttribArray(loc); - glVertexAttribPointer(loc, 3, GL_FLOAT, GL_FALSE, sizeof(struct vertex), m->ptr); - } - + glGenBuffers(1, &vbo); + + // Generate empty vertex array object: + glGenVertexArrays(1, &vao); + + // Set as the current vertex array: + glBindVertexArray(vao); + glBindBuffer(GL_ARRAY_BUFFER, vbo); + + // Vertices for a quad: + GLfloat vertices[] = { + -1.0f, -1.0f, 0.0f, 1.0f, + 1.0f, -1.0f, 0.0f, 1.0f, + 1.0f, 1.0f, 0.0f, 1.0f, + -1.0f, 1.0f, 0.0f, 1.0f, + }; // Upload vertex data: - glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW); + // glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW); + glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); + // Generate a number for our textureID's unique handle GLuint textureID; glGenTextures(1, &textureID); @@ -282,6 +52,40 @@ struct cube cube; glBindTexture(GL_TEXTURE_2D, textureID); } +void saveTextureToTextFile(const char* filename, int width, int height) { + GLubyte* pixels = new GLubyte[width * height * 4]; // Assuming RGBA format + + // Read pixel data from the framebuffer (assuming the texture is bound to the framebuffer) + 
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels); + + // Open the file for writing + std::ofstream outFile(filename, std::ios::out); + if (!outFile.is_open()) { + std::cerr << "Failed to open the file: " << filename << std::endl; + delete[] pixels; + return; + } + + // Write width and height to the file + outFile << width << " " << height << std::endl; + + // Write pixel values to the file + for (int y = 0; y < height; ++y) { + for (int x = 0; x < width; ++x) { + int index = (y * width + x) * 4; // Assuming RGBA format + outFile << static_cast(pixels[index]) << " "; + outFile << static_cast(pixels[index + 1]) << " "; + outFile << static_cast(pixels[index + 2]) << " "; + outFile << static_cast(pixels[index + 3]) << " "; // Alpha channel + } + outFile << std::endl; + } + + outFile.close(); + delete[] pixels; +} + + void matToTexture(unsigned char* buffer , GLenum minFilter, GLenum magFilter, GLenum wrapFilter, int width, int height) { // Catch silly-mistake texture interpolation method for magnification @@ -334,121 +138,13 @@ void matToTexture(unsigned char* buffer , GLenum minFilter, GLenum magFilter, GL // std::cout << "Will Generate minmap" << std::endl; glGenerateMipmap(GL_TEXTURE_2D); } -} - - -// void -// draw_frames(void) -// { -// unsigned char* buffer = (unsigned char*) malloc(640 * 480 * 3); -// // Fill the buffer with green color -// for (int i = 0; i < 640 * 480 * 3; i += 3) { -// buffer[i] = 0; // Blue channel -// buffer[i + 1] = 255; // Green channel -// buffer[i + 2] = 0; // Red channel -// } -// int window_height = 480; -// int window_width = 640; - -// static float angle = 0.0f; - -// // Rotate slightly: -// angle += 0.00f; - -// // Setup rotation matrix: -// mat_rotate(matrix, rot.x, rot.y, rot.z, angle); - -// // cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 255, 0)); -// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); -// glMatrixMode(GL_MODELVIEW); // Operate on model-view matrix - -// glEnable(GL_TEXTURE_2D); -// GLuint 
image_tex = matToTexture(buffer, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP); -// glBegin(GL_QUADS); -// glTexCoord2i(0, 0); glVertex2i(0, 0); -// glTexCoord2i(0, 1); glVertex2i(0, window_height); -// glTexCoord2i(1, 1); glVertex2i(window_width, window_height); -// glTexCoord2i(1, 0); glVertex2i(window_width, 0); -// glEnd(); - -// glDeleteTextures(1, &image_tex); -// glDisable(GL_TEXTURE_2D); -// } -int global_count = 0; - -void -draw_frames(void) -{ - // unsigned char* buffer = (unsigned char*) malloc(640 * 480 * 3); - // // Fill the buffer with green color - - // if(global_count % 2 == 0) - // { - // for (int i = 0; i < 640 * 480 * 3; i += 3) { - // buffer[i] = 0; // Blue channel - // buffer[i + 1] = 255; // Green channel - // buffer[i + 2] = 0; // Red channel - // } - // } - // else - // { - // for (int i = 0; i < 640 * 480 * 3; i += 3) { - // buffer[i] = 255; // Blue channel - // buffer[i + 1] = 0; // Green channel - // buffer[i + 2] = 0; // Red channel - // } - - // } - unsigned char *buffer = (unsigned char *)malloc(640 * 480 * 3); - // Fill the buffer with green color - - if (global_count % 2 == 0) - { - for (int i = 0; i < 640 * 480 * 3; i += 3) - { - buffer[i] = 0; // Blue channel - buffer[i + 1] = 255; // Green channel - buffer[i + 2] = 0; // Red channel - } - } - else - { - for (int i = 0; i < 640 * 480 * 3; i += 3) - { - buffer[i] = 255; // Blue channel - buffer[i + 1] = 0; // Green channel - buffer[i + 2] = 0; // Red channel - } - } - - - int window_height = 480; - int window_width = 640; - - static float angle = 0.0f; - - // Rotate slightly: - angle += 0.00f; - - // Setup rotation matrix: - mat_rotate(matrix, rot.x, rot.y, rot.z, angle); - - // GLuint image_tex = matToTexture(buffer, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP); - - // Don't clip against background: - glClear(GL_DEPTH_BUFFER_BIT); - - // Draw all the triangles in the buffer: - glBindVertexArray(vao); - glDrawArrays(GL_TRIANGLES, 0, 12 * 3); - global_count++; + // 
saveTextureToTextFile("newvindows.png", 1280, 720); } void drawCameraFrame(void* frameData, int width, int height){ static float angle = 0.0f; - mat_rotate(matrix, rot.x, rot.y, rot.z, angle); matToTexture((unsigned char*)frameData, GL_LINEAR_MIPMAP_LINEAR, GL_LINEAR, GL_CLAMP_TO_BORDER, width, height); @@ -457,52 +153,12 @@ void drawCameraFrame(void* frameData, int width, int height){ // Draw all the triangles in the buffer: glBindVertexArray(vao); - glDrawArrays(GL_TRIANGLES, 0, 12 * 3); -} - -void -model_draw (void) -{ - static float angle = 0.0f; - - // Rotate slightly: - angle += 0.00f; - - // Setup rotation matrix: - mat_rotate(matrix, rot.x, rot.y, rot.z, angle); - - // Use our own shaders: - // Main program for rendering - program_cube_use(); - - // Don't clip against background: - glClear(GL_DEPTH_BUFFER_BIT); + glDrawArrays(GL_QUADS, 0, 4); + } - // Draw all the triangles in the buffer: - glBindVertexArray(vao); - glDrawArrays(GL_TRIANGLES, 0, 12 * 3); -} const float * model_matrix (void) { return matrix; } - -void -model_pan_start (int x, int y) -{ - pan.x = x; - pan.y = y; -} - -void -model_pan_move (int x, int y) -{ - int dx = pan.x - x; - int dy = pan.y - y; - - // Rotation vector is perpendicular to (dx, dy): - rot.x = dy; - rot.y = -dx; -} diff --git a/base/src/OrderedCacheOfFiles.cpp b/base/src/OrderedCacheOfFiles.cpp index 3beb230ce..91693cdbb 100644 --- a/base/src/OrderedCacheOfFiles.cpp +++ b/base/src/OrderedCacheOfFiles.cpp @@ -689,7 +689,7 @@ bool OrderedCacheOfFiles::parseFiles(uint64_t start_ts, bool direction, bool inc } // cache insertion - LOG_INFO << "cache insert: " << mp4File << "\n"; + // LOG_INFO << "cache insert: " << mp4File << "\n"; Video vid(mp4File.string(), fileTS); /* ----- first relevant file found ----- */ diff --git a/base/src/Program.cpp b/base/src/Program.cpp index 0d175a0b1..c4d977a2b 100644 --- a/base/src/Program.cpp +++ b/base/src/Program.cpp @@ -18,45 +18,6 @@ #include "Program.h" #include "GLUtils.h" - -const GLchar 
*CUBE_VERTEX_SOURCE = -"#version 330\n" -"uniform mat4 view_matrix;\n" -"uniform mat4 model_matrix;\n" -"in vec3 vertex;\n" -"in vec3 vcolor;\n" -"in vec3 normal;\n" -"out vec3 fcolor;\n" -"out vec3 fpos;\n" -"out float fdot;\n" -"void main (void)\n" -"{\n" -" vec4 modelspace = model_matrix * vec4(vertex, 1.0);\n" -" gl_Position = view_matrix * modelspace;\n" -" fcolor = vcolor;\n" -" vec4 sight = vec4(0, 0, -1.0, 0.0);\n" -" vec4 wnormal = model_matrix * vec4(normal, 0.0);\n" -" fdot = dot(sight, wnormal);\n" -" fpos = modelspace.xyz;\n" -"}\n"; - - -const GLchar *CUBE_FRAGMENT_SOURCE = -"#version 330\n" -"in vec3 fcolor;\n" -"in vec3 fpos;\n" -"in float fdot;\n" -"out vec4 fragcolor;\n" -"void main (void)\n" -"{\n" -" if (!gl_FrontFacing)\n" -" return;\n" -" vec3 linear = pow(fcolor, vec3(1.0 / 2.2));\n" -" float dst = distance(vec3(0, 0, 2), fpos) * 0.4;\n" -" vec3 scaled = linear * vec3(fdot * dst);\n" -" fragcolor = vec4(pow(scaled, vec3(2.2)), 0.0);\n" -"}\n"; - const GLchar *BKGD_VERTEX_SOURCE = "#version 150\n" "in vec2 vertex;\n" @@ -67,20 +28,6 @@ const GLchar *BKGD_VERTEX_SOURCE = " ftex = vec2(texture.x, 1.0 - texture.y);\n" " gl_Position = vec4(vertex, 0.5, 1.0);\n" "}\n"; -// " ftex = texture;\n" -// const GLchar *BKGD_VERTEX_SOURCE = -// "#version 150\n" -// "in vec2 vertex;\n" -// "in vec2 texture;\n" -// "out vec2 ftex;\n" -// "uniform mat4 view_matrix;\n" // Include any necessary matrices -// "uniform mat4 model_matrix;\n" -// "void main (void)\n" -// "{\n" -// " ftex = (model_matrix * vec4(texture, 0.0, 1.0)).xy;\n" -// " gl_Position = view_matrix * vec4(vertex, 0.5, 1.0);\n" -// "}\n"; - const GLchar *BKGD_FRAGMENT_SOURCE = "#version 150\n" @@ -118,19 +65,11 @@ static struct loc loc_bkgd[] = { [LOC_BKGD_TEXTURE] = { "texture", ATTRIBUTE }, }; -static struct loc loc_cube[] = { - [LOC_CUBE_VIEW] = { "view_matrix", UNIFORM }, - [LOC_CUBE_MODEL] = { "model_matrix", UNIFORM }, - [LOC_CUBE_VERTEX] = { "vertex", ATTRIBUTE }, - [LOC_CUBE_VCOLOR] = { 
"vcolor", ATTRIBUTE }, - [LOC_CUBE_NORMAL] = { "normal", ATTRIBUTE }, -}; // Programs: enum { - BKGD, - CUBE, -}; + BKGD + }; struct program { struct { @@ -142,7 +81,7 @@ struct program { GLuint id; }; -static program programs[2] = { +static program programs[1] = { { { { @@ -157,21 +96,6 @@ static program programs[2] = { loc_bkgd, NELEM(loc_bkgd), 0 - }, - { - { - { - (const uint8_t *)CUBE_VERTEX_SOURCE, - (const uint8_t *)CUBE_VERTEX_SOURCE + strlen(CUBE_VERTEX_SOURCE) - }, - { - (const uint8_t *)CUBE_FRAGMENT_SOURCE, - (const uint8_t *)CUBE_FRAGMENT_SOURCE + strlen(CUBE_FRAGMENT_SOURCE) - } - }, - loc_cube, - NELEM(loc_cube), - 1 } }; @@ -285,20 +209,10 @@ programs_init (void) program_init(p); } -void -program_cube_use (void) -{ - glUseProgram(programs[CUBE].id); - - glUniformMatrix4fv(loc_cube[LOC_CUBE_VIEW ].id, 1, GL_FALSE, view_matrix()); - glUniformMatrix4fv(loc_cube[LOC_CUBE_MODEL].id, 1, GL_FALSE, model_matrix()); -} - void program_bkgd_use (void) { glUseProgram(programs[BKGD].id); - glUniform1i(glGetUniformLocation(programs[BKGD].id, "tex"), 0); } @@ -307,9 +221,3 @@ program_bkgd_loc (const enum LocBkgd index) { return loc_bkgd[index].id; } - -GLint -program_cube_loc (const enum LocCube index) -{ - return loc_cube[index].id; -} diff --git a/base/test/gtkglrenderer_tests.cpp b/base/test/gtkglrenderer_tests.cpp index 86fa99343..41aac31be 100644 --- a/base/test/gtkglrenderer_tests.cpp +++ b/base/test/gtkglrenderer_tests.cpp @@ -1,5 +1,6 @@ #include - +#include +#include #include "PipeLine.h" #include "NvV4L2Camera.h" #include "NvTransform.h" @@ -30,11 +31,26 @@ // #define CONFIG_PATH "config/" PipeLine p("test"); +PipeLine p2("test2"); +PipeLine p3("test3"); +PipeLine p4("test4"); +PipeLine p5("test5"); +PipeLine p6("test6"); GtkWidget *glarea; GtkWidget *glarea2; GtkWidget *glarea3; GtkWidget *glarea4; +GtkWidget *glarea5; +GtkWidget *glarea6; +GtkWidget *window; GtkWidget *glAreaSwitch; +GtkWidget *parentCont; +GtkWidget *parentCont4; +GtkWidget 
*parentCont3; +GtkWidget *parentCont5; +GtkWidget *parentCont6; +static int pipelineNumber = 0; + BOOST_AUTO_TEST_SUITE(gtkglrenderer_tests) struct rtsp_client_tests_data { @@ -67,40 +83,14 @@ BOOST_AUTO_TEST_CASE(basic, *boost::unit_test::disabled()) // p.term(); // p.wait_for_all(); } -void lauchAtlPipeline() -{ - Logger::setLogLevel(boost::log::trivial::severity_level::info); - - auto source = boost::shared_ptr(new NvV4L2Camera(NvV4L2CameraProps(640, 360, 10))); - - auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); - source->setNext(transform); - - // GtkGlRendererProps gtkglsinkProps(glarea, 1280, 720); - // auto sink = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); - // transform->setNext(sink); - - // GtkGlRendererProps gtkglsinkProps2(glarea2, 1280, 720); - // auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); - // transform->setNext(sink2); - // GtkGlRendererProps gtkglsinkProps3(glarea3, 1280, 720); - // auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); - // transform->setNext(sink3); - - // GtkGlRendererProps gtkglsinkProps4(glarea4, 1280, 720); - // auto sink4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); - // transform->setNext(sink4); - - auto sink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0))); - transform->setNext(sink); - - p.appendModule(source); +void secondPipeline() +{ p.init(); - Logger::setLogLevel(boost::log::trivial::severity_level::info); p.run_all_threaded(); } -boost::shared_ptr launchPipeline() + +boost::shared_ptr launchPipeline1() { rtsp_client_tests_data d; string url = "rtsp://root:m4m1g0@10.102.10.77/axis-media/media.amp"; @@ -121,7 +111,7 @@ boost::shared_ptr launchPipeline() auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); decoder_1->setNext(transform); - //MEMCONVERT TO DEVICE + // //MEMCONVERT TO DEVICE auto stream = cudastream_sp(new ApraCudaStream); auto memconversion1 = 
boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream))); transform->setNext(memconversion1); @@ -134,95 +124,248 @@ boost::shared_ptr launchPipeline() auto memconversion2 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream))); resizenppi->setNext(memconversion2); - GtkGlRendererProps gtkglsinkProps(glarea, 640, 360); - GtkGl = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); + GtkGlRendererProps gtkglsinkProps(glarea, 1, 1); + auto GtkGl = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); memconversion2->setNext(GtkGl); - //auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); - // memconversion2->setNext(eglsink); + p.appendModule(source); + p.init(); + p.run_all_threaded(); + return GtkGl; +} + +boost::shared_ptr launchPipeline2() +{ + rtsp_client_tests_data d2; + string url2 = "rtsp://10.102.10.75/axis-media/media.amp"; + + //RTSP + RTSPClientSrcProps rtspProps2 = RTSPClientSrcProps(url2, d2.empty, d2.empty); + auto source2 = boost::shared_ptr(new RTSPClientSrc(rtspProps2)); + auto meta2 = framemetadata_sp(new H264Metadata()); + source2->addOutputPin(meta2); + + //H264DECODER + H264DecoderProps decoder_1_Props2 = H264DecoderProps(); + auto decoder_12 = boost::shared_ptr(new H264Decoder(decoder_1_Props2)); + source2->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source2->setNext(decoder_12); + + //NV-TRANSFORM + auto transform2 = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_12->setNext(transform2); + + //MEMCONVERT TO DEVICE + auto stream = cudastream_sp(new ApraCudaStream); + auto memconversion12 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream))); + transform2->setNext(memconversion12); + + //RESIZE-NPPI + auto resizenppi2 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream))); + memconversion12->setNext(resizenppi2); + + 
//MEMCONVERT TO DMA + auto memconversion22 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream))); + resizenppi2->setNext(memconversion22); + + GtkGlRendererProps gtkglsinkProps2(glAreaSwitch, 2, 2); + auto GtkGl2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); + - // GtkGlRendererProps gtkglsinkProps2(glarea2, 1024, 1024); - // auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); - // memconversion2->setNext(sink2); + memconversion22->setNext(GtkGl2); + + p2.appendModule(source2); + p2.init(); + p2.run_all_threaded(); + return GtkGl2; +} - // GtkGlRendererProps gtkglsinkProps3(glarea3, 1024, 1024); - // auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); - // memconversion2->setNext(sink3); +boost::shared_ptr launchPipeline3() +{ + rtsp_client_tests_data d3; + string url3 = "rtsp://10.102.10.42/axis-media/media.amp"; - // GtkGlRendererProps gtkglsinkProps4(glarea4, 1024, 1024); - // auto sink4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); - // memconversion2->setNext(sink4); + //RTSP + RTSPClientSrcProps rtspProps3 = RTSPClientSrcProps(url3, d3.empty, d3.empty); + auto source3 = boost::shared_ptr(new RTSPClientSrc(rtspProps3)); + auto meta3 = framemetadata_sp(new H264Metadata()); + source3->addOutputPin(meta3); - // auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); - // decoder_1->setNext(eglsink); + //H264DECODER + H264DecoderProps decoder_3_Props2 = H264DecoderProps(); + auto decoder_13 = boost::shared_ptr(new H264Decoder(decoder_3_Props2)); + source3->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source3->setNext(decoder_13); - p.appendModule(source); - p.init(); - Logger::setLogLevel(boost::log::trivial::severity_level::info); - p.run_all_threaded(); + //NV-TRANSFORM + auto transform3 = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_13->setNext(transform3); + + //MEMCONVERT TO DEVICE + 
auto stream3 = cudastream_sp(new ApraCudaStream); + auto memconversion13 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream3))); + transform3->setNext(memconversion13); + + //RESIZE-NPPI + auto resizenppi3 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream3))); + memconversion13->setNext(resizenppi3); + + //MEMCONVERT TO DMA + auto memconversion33 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream3))); + resizenppi3->setNext(memconversion33); - return GtkGl; -; + GtkGlRendererProps gtkglsinkProps3(glarea3, 2, 2); + auto GtkGl3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); + + memconversion33->setNext(GtkGl3); + + p3.appendModule(source3); + p3.init(); + p3.run_all_threaded(); + return GtkGl3; } -void launchPipelineRTSP() +boost::shared_ptr launchPipeline4() { - rtsp_client_tests_data d; - string url = "rtsp://root:m4m1g0@10.102.10.77/axis-media/media.amp"; - RTSPClientSrcProps rtspProps = RTSPClientSrcProps(url, d.empty, d.empty); - auto source = boost::shared_ptr(new RTSPClientSrc(rtspProps)); - auto meta = framemetadata_sp(new H264Metadata()); - source->addOutputPin(meta); + rtsp_client_tests_data d4; + string url4 = "rtsp://10.102.10.42/axis-media/media.amp"; + + //RTSP + RTSPClientSrcProps rtspProps4 = RTSPClientSrcProps(url4, d4.empty, d4.empty); + auto source4 = boost::shared_ptr(new RTSPClientSrc(rtspProps4)); + auto meta4 = framemetadata_sp(new H264Metadata()); + source4->addOutputPin(meta4); + + //H264DECODER + H264DecoderProps decoder_4_Props2 = H264DecoderProps(); + auto decoder_14 = boost::shared_ptr(new H264Decoder(decoder_4_Props2)); + source4->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source4->setNext(decoder_14); + + //NV-TRANSFORM + auto transform4 = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_14->setNext(transform4); + + //MEMCONVERT TO DEVICE + auto stream4 = 
cudastream_sp(new ApraCudaStream); + auto memconversion14 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream4))); + transform4->setNext(memconversion14); + + //RESIZE-NPPI + auto resizenppi4 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream4))); + memconversion14->setNext(resizenppi4); + + //MEMCONVERT TO DMA + auto memconversion44 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream4))); + resizenppi4->setNext(memconversion44); + + GtkGlRendererProps gtkglsinkProps4(glarea4, 2, 2); + auto GtkGl4 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps4)); + - H264DecoderProps decoder_1_Props = H264DecoderProps(); - auto decoder_1 = boost::shared_ptr(new H264Decoder(decoder_1_Props)); - source->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); - source->setNext(decoder_1); + memconversion44->setNext(GtkGl4); + + p4.appendModule(source4); + p4.init(); + p4.run_all_threaded(); + return GtkGl4; +} +boost::shared_ptr launchPipeline5() +{ + rtsp_client_tests_data d5; + string url5 = "rtsp://10.102.10.75/axis-media/media.amp"; + //RTSP + RTSPClientSrcProps rtspProps5 = RTSPClientSrcProps(url5, d5.empty, d5.empty); + auto source5 = boost::shared_ptr(new RTSPClientSrc(rtspProps5)); + auto meta5 = framemetadata_sp(new H264Metadata()); + source5->addOutputPin(meta5); - auto transform = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); - decoder_1->setNext(transform); + //H264DECODER + H264DecoderProps decoder_5_Props2 = H264DecoderProps(); + auto decoder_15 = boost::shared_ptr(new H264Decoder(decoder_5_Props2)); + source5->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source5->setNext(decoder_15); + + //NV-TRANSFORM + auto transform5 = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_15->setNext(transform5); - // auto stream = cudastream_sp(new ApraCudaStream); - // auto copy1 = 
boost::shared_ptr(new CudaMemCopy(CudaMemCopyProps(cudaMemcpyHostToDevice, stream))); - // transform->setNext(copy1); + //MEMCONVERT TO DEVICE + auto stream5 = cudastream_sp(new ApraCudaStream); + auto memconversion15 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream5))); + transform5->setNext(memconversion15); - // auto m2 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream))); - // copy1->setNext(m2); - // auto copy2 = boost::shared_ptr(new CudaMemCopy(CudaMemCopyProps(cudaMemcpyDeviceToHost, stream))); - // m2->setNext(copy2); - // auto outputPinId = copy2->getAllOutputPinsByType(FrameMetadata::RAW_IMAGE)[0]; + //RESIZE-NPPI + auto resizenppi5 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream5))); + memconversion15->setNext(resizenppi5); + //MEMCONVERT TO DMA + auto memconversion55 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream5))); + resizenppi5->setNext(memconversion55); + + GtkGlRendererProps gtkglsinkProps5(glarea5, 2, 2); + auto GtkGl5 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps5)); - GtkGlRendererProps gtkglsinkProps(glarea, 1280, 720); - auto sink = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps)); - transform->setNext(sink); - GtkGlRendererProps gtkglsinkProps2(glarea2, 1280, 720); - auto sink2 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps2)); - transform->setNext(sink2); + memconversion55->setNext(GtkGl5); + + p5.appendModule(source5); + p5.init(); + p5.run_all_threaded(); + return GtkGl5; +} - GtkGlRendererProps gtkglsinkProps3(glarea3, 1280, 720); - auto sink3 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps3)); - transform->setNext(sink3); +boost::shared_ptr launchPipeline6() +{ + rtsp_client_tests_data d6; + string url6 = "rtsp://root:m4m1g0@10.102.10.77/axis-media/media.amp"; - GtkGlRendererProps gtkglsinkProps4(glarea4, 1280, 720); - auto sink4 = boost::shared_ptr(new 
GtkGlRenderer(gtkglsinkProps4)); - transform->setNext(sink4); + //RTSP + RTSPClientSrcProps rtspProps6 = RTSPClientSrcProps(url6, d6.empty, d6.empty); + auto source6 = boost::shared_ptr(new RTSPClientSrc(rtspProps6)); + auto meta6 = framemetadata_sp(new H264Metadata()); + source6->addOutputPin(meta6); - // auto eglsink = boost::shared_ptr(new EglRenderer(EglRendererProps(0,0,0))); - // decoder_1->setNext(eglsink); + //H264DECODER + H264DecoderProps decoder_6_Props2 = H264DecoderProps(); + auto decoder_16 = boost::shared_ptr(new H264Decoder(decoder_6_Props2)); + source6->getAllOutputPinsByType(FrameMetadata::FrameType::H264_DATA); + source6->setNext(decoder_16); - p.appendModule(source); - p.init(); - Logger::setLogLevel(boost::log::trivial::severity_level::info); - p.run_all_threaded(); + //NV-TRANSFORM + auto transform6 = boost::shared_ptr(new NvTransform(NvTransformProps(ImageMetadata::RGBA))); + decoder_16->setNext(transform6); + + //MEMCONVERT TO DEVICE + auto stream6 = cudastream_sp(new ApraCudaStream); + auto memconversion16 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::CUDA_DEVICE, stream6))); + transform6->setNext(memconversion16); + + //RESIZE-NPPI + auto resizenppi6 = boost::shared_ptr(new ResizeNPPI(ResizeNPPIProps(640, 360, stream6))); + memconversion16->setNext(resizenppi6); + //MEMCONVERT TO DMA + auto memconversion66 = boost::shared_ptr(new MemTypeConversion(MemTypeConversionProps(FrameMetadata::DMABUF, stream6))); + resizenppi6->setNext(memconversion66); + + GtkGlRendererProps gtkglsinkProps6(glarea6, 2, 2); + auto GtkGl6 = boost::shared_ptr(new GtkGlRenderer(gtkglsinkProps6)); + + + memconversion66->setNext(GtkGl6); + + p6.appendModule(source6); + p6.init(); + p6.run_all_threaded(); + return GtkGl6; } + void screenChanged(GtkWidget *widget, GdkScreen *old_screen, gpointer userdata) { @@ -246,23 +389,94 @@ void my_getsize(GtkWidget *widget, GtkAllocation *allocation, void *data) { } static gboolean 
hide_gl_area(gpointer data) { + // gtk_widget_hide(glarea); + + GtkWidget* parentContainer = gtk_widget_get_parent(GTK_WIDGET(glarea)); + gtk_widget_unrealize(glarea); + // gtk_container_remove(GTK_CONTAINER(parentContainer), glarea); + // // // Remove the GtkGLArea from its parent container + // gtk_gl_area_queue_render(GTK_GL_AREA(glAreaSwitch)); + + return G_SOURCE_REMOVE; // Remove the timeout source after execution +} + +static gboolean hideWidget(gpointer data) { gtk_widget_hide(glarea); - gtk_widget_hide(glAreaSwitch); return G_SOURCE_REMOVE; // Remove the timeout source after execution } static gboolean change_gl_area(gpointer data) { GtkGl->changeProps(glAreaSwitch, 640, 360); GtkGl->step(); + gtk_container_add(GTK_CONTAINER(parentCont), glAreaSwitch); + gtk_gl_area_queue_render(GTK_GL_AREA(glAreaSwitch)); + gtk_widget_queue_draw(GTK_WIDGET(glAreaSwitch)); + return G_SOURCE_REMOVE; // Change the glarea before showing } static gboolean show_gl_area(gpointer data) { - //gtk_widget_show(glarea); + // gtk_widget_show(glarea); gtk_widget_show(glAreaSwitch); return G_SOURCE_REMOVE; // Remove the timeout source after execution } +void startPipeline6() +{ + LOG_ERROR<<"CALLING PIPELINE 6!!!!!!"; + launchPipeline6(); + gtk_container_add(GTK_CONTAINER(parentCont6),GTK_WIDGET(glarea6)); + gtk_widget_show(GTK_WIDGET(glarea6)); +} + +void startPipeline5() +{ + LOG_ERROR<<"CALLING PIPELINE 5!!!!!!"; + launchPipeline5(); + gtk_container_add(GTK_CONTAINER(parentCont5),GTK_WIDGET(glarea5)); + gtk_widget_show(GTK_WIDGET(glarea5)); +} + +void startPipeline4() +{ + LOG_ERROR<<"CALLING PIPELINE 4!!!!!!"; + launchPipeline4(); + gtk_container_add(GTK_CONTAINER(parentCont4),GTK_WIDGET(glarea4)); + gtk_widget_show(GTK_WIDGET(glarea4)); +} + +void startPipeline3() +{ + LOG_ERROR<<"CALLING PIPELINE 3!!!!!!"; + launchPipeline3(); + gtk_container_add(GTK_CONTAINER(parentCont3),GTK_WIDGET(glarea3)); + gtk_widget_show(GTK_WIDGET(glarea3)); + //startPipeline4(); +} + +void 
on_button_clicked() +{ + LOG_ERROR<<"CALLING BUTTON CLICKED!!!!!!"; + // gtk_widget_hide(GTK_WIDGET(glarea)); + if(pipelineNumber == 0){ + launchPipeline2(); + gtk_container_add(GTK_CONTAINER(parentCont),GTK_WIDGET(glAreaSwitch)); + gtk_widget_show(GTK_WIDGET(glAreaSwitch)); + } else if(pipelineNumber == 1){ + startPipeline3(); + }else if(pipelineNumber == 2){ + startPipeline4(); + } + else if(pipelineNumber == 3){ + startPipeline5(); + } + else if(pipelineNumber == 4){ + startPipeline6(); + } + pipelineNumber+=1; + + +} BOOST_AUTO_TEST_CASE(windowInit2, *boost::unit_test::disabled()) { @@ -278,9 +492,10 @@ BOOST_AUTO_TEST_CASE(windowInit2, *boost::unit_test::disabled()) gtk_builder_add_from_file(m_builder, "/mnt/disks/ssd/vinayak/backup/GtkRendererModule/ApraPipes/assets/appui.glade", NULL); std::cout << "ui glade found" << std::endl; - GtkWidget *window = GTK_WIDGET(gtk_window_new(GTK_WINDOW_TOPLEVEL)); + window = GTK_WIDGET(gtk_window_new(GTK_WINDOW_TOPLEVEL)); + gtk_window_set_decorated(GTK_WINDOW(window), FALSE); g_object_ref(window); - gtk_window_set_default_size(GTK_WINDOW(window), 1280, 400); + gtk_window_set_default_size(GTK_WINDOW(window), 3840, 2160); gtk_window_set_resizable(GTK_WINDOW(window), FALSE); gtk_widget_set_app_paintable(window, TRUE); @@ -291,19 +506,38 @@ BOOST_AUTO_TEST_CASE(windowInit2, *boost::unit_test::disabled()) GtkWidget *mainFixed = GTK_WIDGET(gtk_builder_get_object(m_builder, "mainWidget")); gtk_container_add(GTK_CONTAINER(window), mainFixed); - glarea = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw")); + GtkWidget *button = GTK_WIDGET(gtk_builder_get_object(m_builder, "button")); + g_signal_connect(button, "clicked", G_CALLBACK(on_button_clicked), NULL); + glarea = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw")); + glarea3 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw3")); + glarea4 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw4")); + glarea5 = 
GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw5")); + glarea6 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw6")); glAreaSwitch = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw2")); - // glarea2 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw2")); - // glarea3 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw3")); - // glarea4 = GTK_WIDGET(gtk_builder_get_object(m_builder, "glareadraw4")); + parentCont = gtk_widget_get_parent(GTK_WIDGET(glAreaSwitch)); + parentCont3 = gtk_widget_get_parent(GTK_WIDGET(glarea3)); + parentCont4 = gtk_widget_get_parent(GTK_WIDGET(glarea4)); + parentCont5 = gtk_widget_get_parent(GTK_WIDGET(glarea5)); + parentCont6 = gtk_widget_get_parent(GTK_WIDGET(glarea6)); + gtk_container_remove(GTK_CONTAINER(parentCont), glAreaSwitch); + gtk_container_remove(GTK_CONTAINER(parentCont3), glarea3); + gtk_container_remove(GTK_CONTAINER(parentCont4), glarea4); + gtk_container_remove(GTK_CONTAINER(parentCont5), glarea5); + gtk_container_remove(GTK_CONTAINER(parentCont6), glarea6); + // // Remove the GtkGLArea from its parent container + std::cout << "Printing Pointer of Old & New GL AREA" << glarea << "======== " << glAreaSwitch << std::endl; + g_signal_connect(window, "destroy", G_CALLBACK(gtk_main_quit), NULL); //g_signal_connect(glarea, "size-allocate", G_CALLBACK(my_getsize), NULL); - launchPipeline(); + launchPipeline1(); + //launchPipeline2(); gtk_widget_show_all(window); - g_timeout_add(5000, hide_gl_area, NULL); - g_timeout_add(7000, change_gl_area, NULL); - g_timeout_add(9000, show_gl_area, NULL); + + // g_timeout_add(2000, hideWidget, NULL); + // g_timeout_add(5000, hide_gl_area, NULL); + // g_timeout_add(7000, change_gl_area, NULL); + // g_timeout_add(9000, show_gl_area, NULL); gtk_main(); p.stop(); From 356fd124fb9710494fe566c58c4469fb6f2c60ca Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Mon, 12 Feb 2024 11:11:07 +0530 Subject: [PATCH 15/19] Reverse play fixes and gtkgl cleanup --- 
base/include/AIPExceptions.h | 120 ++++++++++++---------- base/include/Command.h | 29 +++++- base/include/GtkGlRenderer.h | 6 ++ base/include/H264Utils.h | 1 + base/include/OrderedCacheOfFiles.h | 1 + base/src/GtkGlRenderer.cpp | 159 +++++++++++------------------ base/src/H264Decoder.cpp | 46 +++++++-- base/src/H264Utils.cpp | 25 +++++ base/src/Model.cpp | 37 +------ base/src/Mp4ReaderSource.cpp | 42 ++++++-- base/src/OrderedCacheOfFiles.cpp | 29 ++++++ base/test/rtsp_client_tests.cpp | 6 +- 12 files changed, 292 insertions(+), 209 deletions(-) diff --git a/base/include/AIPExceptions.h b/base/include/AIPExceptions.h index d89596f66..f24370e26 100755 --- a/base/include/AIPExceptions.h +++ b/base/include/AIPExceptions.h @@ -45,66 +45,78 @@ class AIP_Exception : public std::runtime_error { -public: - /** Constructor (C++ STL strings). - * @param message The error message. - */ - explicit AIP_Exception(int type,const std::string file,int line,const std::string logMessage) : - runtime_error(std::to_string(type)) - { - if (type > AIP_FATAL) - { - AIPException_LOG_SEV(boost::log::trivial::severity_level::fatal,type) << file << ":" << line << ":" << logMessage.c_str(); - } - else - { - AIPException_LOG_SEV(boost::log::trivial::severity_level::error,type) << file << ":" << line << ":" << logMessage.c_str(); - } - - message = logMessage; - } - - /** Destructor. - * Virtual to allow for subclassing. - */ - virtual ~AIP_Exception() throw () {} - - int getCode() - { - return atoi(what()); - } - - std::string getError() - { - return message; - } - +public: + /** Constructor (C++ STL strings). + * @param message The error message. 
+ */ + explicit AIP_Exception(int type,const std::string file,int line,const std::string logMessage) : + runtime_error(std::to_string(type)) + { + if (type > AIP_FATAL) + { + AIPException_LOG_SEV(boost::log::trivial::severity_level::fatal,type) << file << ":" << line << ":" << logMessage.c_str(); + } + else + { + AIPException_LOG_SEV(boost::log::trivial::severity_level::error,type) << file << ":" << line << ":" << logMessage.c_str(); + } + message = logMessage; + } + explicit AIP_Exception(int type,const std::string file,int line,const std::string logMessage, std::string _previosFile, std::string _nextFile) : + runtime_error(std::to_string(type)) + { + previousFile = _previosFile; + nextFile = _nextFile; + AIPException_LOG_SEV(boost::log::trivial::severity_level::error,type) << file << ":" << line << ":" << previousFile.c_str() << ":" << nextFile.c_str() << ":" << logMessage.c_str(); + } + /** Destructor. + * Virtual to allow for subclassing. + */ + virtual ~AIP_Exception() throw () {} + int getCode() + { + return atoi(what()); + } + std::string getError() + { + return message; + } + std::string getPreviousFile() + { + return previousFile; + } + std::string getNextFile() + { + return nextFile; + } private: - std::string message; + std::string message; + std::string previousFile; + std::string nextFile; }; - class Mp4_Exception : public AIP_Exception { public: - explicit Mp4_Exception(int type, const std::string file, int line, const std::string logMessage) : - AIP_Exception(type, file, line, logMessage) - { - } - - explicit Mp4_Exception(int type, const std::string file, int line, int _openFileErrorCode, const std::string logMessage) : - AIP_Exception(type, file, line, logMessage) - { - openFileErrorCode = _openFileErrorCode; - } - - int getOpenFileErrorCode() - { - return openFileErrorCode; - } - + explicit Mp4_Exception(int type, const std::string file, int line, const std::string logMessage) : + AIP_Exception(type, file, line, logMessage) + { + } + explicit 
Mp4_Exception(int type, const std::string file, int line, int _openFileErrorCode, const std::string logMessage) : + AIP_Exception(type, file, line, logMessage) + { + openFileErrorCode = _openFileErrorCode; + } + explicit Mp4_Exception(int type, const std::string file, int line, const std::string logMessage, std::string previosFile, std::string nextFile) : + AIP_Exception(type, file, line, logMessage, previosFile, nextFile) + { + } + int getOpenFileErrorCode() + { + return openFileErrorCode; + } private: - int openFileErrorCode = 0; + int openFileErrorCode = 0; }; - #define AIPException(_type,_message) AIP_Exception(_type,__FILE__,__LINE__,_message) #define Mp4Exception(_type,_message) Mp4_Exception(_type,__FILE__,__LINE__,_message) +#define Mp4ExceptionNoVideoTrack(_type,_message, _previosFile, _nextFile) Mp4_Exception(_type,__FILE__,__LINE__,_message,_previosFile,_nextFile) \ No newline at end of file diff --git a/base/include/Command.h b/base/include/Command.h index 5014a5f6b..ae1796b48 100755 --- a/base/include/Command.h +++ b/base/include/Command.h @@ -26,7 +26,8 @@ class Command MP4WriterLastTS, MMQtimestamps, Rendertimestamp, - RenderPlayPause + RenderPlayPause, + Mp4ErrorHandle }; Command() @@ -619,4 +620,30 @@ class RenderPlayPause : public Command ar& boost::serialization::base_object(*this); ar& pauseRenderer; } +}; + +class Mp4ErrorHandle : public Command +{ +public: + Mp4ErrorHandle() : Command(Command::CommandType::Mp4ErrorHandle) + { + } + + size_t getSerializeSize() + { + return Command::getSerializeSize() + sizeof(previousFile) + sizeof(nextFile); + } + + std::string previousFile; + std::string nextFile; + +private: + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /* file_version */) + { + ar& boost::serialization::base_object(*this); + ar& previousFile; + ar& nextFile; + } }; \ No newline at end of file diff --git a/base/include/GtkGlRenderer.h b/base/include/GtkGlRenderer.h index 
4c0565d6d..002c797bc 100644 --- a/base/include/GtkGlRenderer.h +++ b/base/include/GtkGlRenderer.h @@ -2,6 +2,7 @@ #include "Module.h" #include // remove this +#include #include class GtkGlRendererProps : public ModuleProps { @@ -35,7 +36,12 @@ class GtkGlRenderer : public Module bool validateInputPins(); bool shouldTriggerSOS(); bool handleCommand(Command::CommandType type, frame_sp &frame); + void pushFrame(frame_sp frame); + void processQueue(); private: class Detail; boost::shared_ptr mDetail; + std::chrono::steady_clock::time_point lastFrameTime = std::chrono::steady_clock::now(); + std::queue frameQueue; + std::mutex queueMutex; }; diff --git a/base/include/H264Utils.h b/base/include/H264Utils.h index 6609deec5..d4a193918 100644 --- a/base/include/H264Utils.h +++ b/base/include/H264Utils.h @@ -30,4 +30,5 @@ class H264Utils { static H264_NAL_TYPE getNALUType(Frame *frm); static bool getNALUnit(const char *buffer, size_t length, size_t &offset); static std::tuple parseNalu(const const_buffer input); + static H264_NAL_TYPE getNalTypeAfterSpsPps(void* frameData, size_t frameSize); }; \ No newline at end of file diff --git a/base/include/OrderedCacheOfFiles.h b/base/include/OrderedCacheOfFiles.h index 44d368f64..c2591fae3 100644 --- a/base/include/OrderedCacheOfFiles.h +++ b/base/include/OrderedCacheOfFiles.h @@ -57,6 +57,7 @@ class OrderedCacheOfFiles void updateCache(std::string& filePath, uint64_t& start_ts, uint64_t& end_ts); // allow updates from playback std::map> getSnapShot(); // too costly, use for debugging only bool probe(boost::filesystem::path dirPath, std::string& videoName); + bool getPreviosAndNextFile(std::string videoPath, std::string& previousFile, std::string& nextFile); private: bool lastKnownPlaybackDir = true; // sync with mp4 playback boost::mutex m_mutex; diff --git a/base/src/GtkGlRenderer.cpp b/base/src/GtkGlRenderer.cpp index deb119a54..953bcaebd 100644 --- a/base/src/GtkGlRenderer.cpp +++ b/base/src/GtkGlRenderer.cpp @@ -36,7 +36,6 @@ 
class GtkGlRenderer::Detail static void on_resize(GtkGLArea *area, gint width, gint height, gpointer data) { - LOG_ERROR << "this pointer in resize is " << data; printf("In resize width = %d, height = %d\n", width, height); view_set_window(width, height); background_set_window(width, height); @@ -50,24 +49,7 @@ class GtkGlRenderer::Detail { //LOG_ERROR<<"DATA IN RENDER "<mProps.windowWidth<<" "<mProps.windowWidth == 2) - // { - // size_t bufferSize = static_cast(640) * 360 * 3; - // memset(glarea, 0, bufferSize); - // for (size_t i = 1; i < bufferSize; i += 3) { - // glarea[i] = 150; - // } - // } - - // Clear canvas: - - - //LOG_ERROR << "Window width in on_render is " <mProps.windowWidth; - // LOG_DEBUG << "Coming Inside Renderer"; - //LOG_ERROR << "GTKGL POINTER IS===========================>>>>"<< detailInstance->mProps.windowWidth<<" "<< glarea; + if (detailInstance->isMetadataSet == false) { LOG_TRACE << "Metadata is Not Set "; @@ -116,8 +98,7 @@ class GtkGlRenderer::Detail // model_draw(); // draw_frames(); drawCameraFrame(frameToRender, detailInstance->frameWidth, detailInstance->frameHeight); - //LOG_ERRO<<"Framestep is"<< detailInstance->step; - //drawCameraFrame(frameToRender, 1024, 1024); + // Don't propagate signal: return TRUE; @@ -126,19 +107,6 @@ class GtkGlRenderer::Detail static gboolean on_realize(GtkGLArea *glarea, GdkGLContext *context, gpointer data) // Process SOS { - //getting current time - std::chrono::time_point t = std::chrono::system_clock::now(); - auto dur = std::chrono::duration_cast(t.time_since_epoch()); - auto timeStamp = dur.count(); - auto diff = timeStamp - 1705559852000; - LOG_ERROR<<"On realize is called "; - LOG_ERROR<<"Time difference is "<mProps.windowWidth; + return TRUE; } - // on_unrealize() - // { - // // model_cleanup(); - // // background_cleanup(); - // // programs_cleanup(); - - // // Get the frame clock and disconnect the update signal - // GdkGLContext *glcontext = gtk_gl_area_get_context(GTK_GL_AREA(glarea)); - 
// GdkWindow *glwindow = gdk_gl_context_get_window(glcontext); - // GdkFrameClock *frame_clock = gdk_window_get_frame_clock(glwindow); - // g_signal_handlers_disconnect_by_func(frame_clock, G_CALLBACK(gtk_gl_area_queue_render), glarea); - // GtkWidget *parent_container = gtk_widget_get_parent(glarea); - - // // Remove the GtkGLArea from its parent container - // gtk_container_remove(GTK_CONTAINER(parent_container), glarea); - - // // Destroy the GtkGLArea widget - // gtk_widget_destroy(glarea); - // } - static void on_unrealize(GtkGLArea *glarea, gint width, gint height, gpointer data) { @@ -261,39 +207,18 @@ class GtkGlRenderer::Detail void connect_glarea_signals(GtkWidget *glarea) - { - // {"resize", G_CALLBACK(on_resize), (GdkEventMask)0}, - // {"scroll-event", G_CALLBACK(on_scroll), GDK_SCROLL_MASK}, - //connect_signals(glarea, signals, NELEM(signals)); - - + { std::chrono::time_point t = std::chrono::system_clock::now(); auto dur = std::chrono::duration_cast(t.time_since_epoch()); auto timeStamp = dur.count(); renderId = g_signal_connect(glarea, "render", G_CALLBACK(on_render), this); realizeId = g_signal_connect(glarea, "realize", G_CALLBACK(on_realize), this); resizeId = g_signal_connect(glarea, "resize", G_CALLBACK(on_resize), this); - LOG_ERROR<<"Connect to renderId "<>>>>>>"; - // // g_signal_handler_disconnect(glarea, realizeId); - // // g_signal_handler_disconnect(glarea, renderId); - // // g_signal_handler_disconnect(glarea, resizeId); - // } - void disconnect_glarea_signals(GtkWidget *glarea) { - // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_realize), this); - // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_render), this); - // g_signal_handlers_disconnect_by_func(glarea, G_CALLBACK(on_resize), this); - LOG_ERROR << "disconnect_glarea_signals===================================>>>>>>>"; g_signal_handler_disconnect(glarea, realizeId); g_signal_handler_disconnect(glarea, renderId); g_signal_handler_disconnect(glarea, 
resizeId); @@ -301,7 +226,6 @@ class GtkGlRenderer::Detail bool init() { - LOG_ERROR << "MDETAIL GLAREA -> "<< glarea; connect_glarea_signals(glarea); // initialize_gl(GTK_GL_AREA(glarea)); return true; @@ -346,28 +270,63 @@ bool GtkGlRenderer::init() } bool GtkGlRenderer::process(frame_container &frames) - { - auto myId = Module::getId(); - // LOG_ERROR << "GOT " - auto frame = frames.cbegin()->second; - mDetail->cachedFrame = frame; - size_t underscorePos = myId.find('_'); - std::string numericPart = myId.substr(underscorePos + 1); - int myNumber = std::stoi(numericPart); - - if ((controlModule != nullptr) && (myNumber % 2 == 1)) - { - Rendertimestamp cmd; - auto myTime = frames.cbegin()->second->timestamp; - cmd.currentTimeStamp = myTime; - controlModule->queueCommand(cmd); - //LOG_ERROR << "myID is GtkGlRendererModule_ "< lock(queueMutex); + frameQueue.push(frame); +} + +void GtkGlRenderer::processQueue() +{ + auto currentTime = std::chrono::steady_clock::now(); + auto timeDiff = std::chrono::duration_cast(currentTime - lastFrameTime).count(); + + std::lock_guard lock(queueMutex); + if (!frameQueue.empty()) + { + auto frame = frameQueue.front(); + frameQueue.pop(); + auto myId = Module::getId(); + if(myId == "GtkGlRenderer_35") + { + // LOG_INFO << "time diff is = " << timeDiff << "Timestamp is = " << frame->timestamp; + } + + if (timeDiff >= 33) + { + // LOG_ERROR << "GOT " + mDetail->cachedFrame = frame; + size_t underscorePos = myId.find('_'); + std::string numericPart = myId.substr(underscorePos + 1); + int myNumber = std::stoi(numericPart); + + if ((controlModule != nullptr) && (myNumber % 2 == 1)) + { + Rendertimestamp cmd; + auto myTime = frame->timestamp; + cmd.currentTimeStamp = myTime; + controlModule->queueCommand(cmd); + // LOG_ERROR << "myID is GtkGlRendererModule_ "<on_unrealize(); - mDetail->disconnect_glarea_signals(mDetail->glarea); + mDetail->disconnect_glarea_signals(GTK_WIDGET(mDetail->glarea)); mDetail->glarea = glArea; mDetail->windowWidth 
= windowWidth; mDetail->windowHeight = windowHeight; mDetail->init(); - gtk_widget_show(glArea); + gtk_widget_show(GTK_WIDGET(glArea)); } bool GtkGlRenderer::shouldTriggerSOS() diff --git a/base/src/H264Decoder.cpp b/base/src/H264Decoder.cpp index badd0c69e..08bb16690 100644 --- a/base/src/H264Decoder.cpp +++ b/base/src/H264Decoder.cpp @@ -204,8 +204,13 @@ void H264Decoder::bufferBackwardEncodedFrames(frame_sp& frame, short naluType) } // insert frames into the latest gop until I frame comes. latestBackwardGop.emplace_back(frame); + H264Utils::H264_NAL_TYPE nalTypeAfterSpsPps; + if(naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + nalTypeAfterSpsPps = H264Utils::getNalTypeAfterSpsPps(frame->data(), frame->size()); + } // The latest GOP is complete when I Frame comes up, move the GOP to backwardGopBuffer where all the backward GOP's are buffered - if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || nalTypeAfterSpsPps == H264Utils::H264_NAL_TYPE_IDR_SLICE) { foundIFrameOfReverseGop = true; backwardGopBuffer.push_back(std::move(latestBackwardGop)); @@ -235,7 +240,12 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal if (!latestForwardGop.empty()) { short naluTypeOfForwardGopFirstFrame = H264Utils::getNALUType((char*)latestForwardGop.front()->data()); - if (naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + H264Utils::H264_NAL_TYPE nalTypeAfterSpsPpsOfForwardGopFirstFrame; + if(naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + nalTypeAfterSpsPpsOfForwardGopFirstFrame = H264Utils::getNalTypeAfterSpsPps(latestForwardGop.front()->data(), latestForwardGop.front()->size()); + } + if (naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE || nalTypeAfterSpsPpsOfForwardGopFirstFrame == 
H264Utils::H264_NAL_TYPE_IDR_SLICE) { // Corner case: Forward :- current frame is not part of latestForwardGOP if (latestForwardGop.front()->timestamp > frame->timestamp) @@ -261,7 +271,7 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal } } } - else if (!latestForwardGop.empty() && naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + else if (!latestForwardGop.empty() && nalTypeAfterSpsPpsOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) { for (auto itr = latestForwardGop.begin(); itr != latestForwardGop.end(); itr++) { @@ -277,7 +287,12 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal /* buffer fwd GOP and send the current frame */ // new GOP starts - if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + H264Utils::H264_NAL_TYPE nalTypeAfterSpsPpsOfCurrentFrame; + if(naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + nalTypeAfterSpsPpsOfCurrentFrame = H264Utils::getNalTypeAfterSpsPps(frame->data(), frame->size()); + } + if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || nalTypeAfterSpsPpsOfCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) { latestForwardGop.clear(); } @@ -286,7 +301,12 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal // If direction changed to forward in the middle of GOP (Even the latest gop of backward was half and not decoded) , Then we drop the P frames until next I frame. // We also remove the entries of P frames from the incomingFramesTSQ. 
short latestForwardGopFirstFrameNaluType = H264Utils::getNALUType((char*)latestForwardGop.begin()->get()->data()); - if (latestForwardGopFirstFrameNaluType != H264Utils::H264_NAL_TYPE_IDR_SLICE && latestForwardGopFirstFrameNaluType != H264Utils::H264_NAL_TYPE_SEQ_PARAM) + H264Utils::H264_NAL_TYPE naluTypeAfterSpsPpsOfLatestForwardGopFirstFrame; + if(latestForwardGopFirstFrameNaluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + naluTypeAfterSpsPpsOfLatestForwardGopFirstFrame = H264Utils::getNalTypeAfterSpsPps(latestForwardGop.front()->data(), latestForwardGop.front()->size()); + } + if (latestForwardGopFirstFrameNaluType != H264Utils::H264_NAL_TYPE_IDR_SLICE && naluTypeAfterSpsPpsOfLatestForwardGopFirstFrame != H264Utils::H264_NAL_TYPE_IDR_SLICE) { clearIncompleteBwdGopTsFromIncomingTSQ(latestForwardGop); return; @@ -342,8 +362,13 @@ void H264Decoder::saveSpsPps(frame_sp frame) } bool H264Decoder::process(frame_container& frames) -{ +{ auto frame = frames.begin()->second; + auto myId = Module::getId(); + // if (myId == "H264Decoder_42") + // { + // LOG_INFO << "Timestamp is = " << frame->timestamp; + // } auto frameMetadata = frame->getMetadata(); auto h264Metadata = FrameMetadataFactory::downcast(frameMetadata); @@ -425,13 +450,18 @@ bool H264Decoder::process(frame_container& frames) return true; } - if (mDirection && ((naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) || (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE))) + H264Utils::H264_NAL_TYPE nalTypeAfterSpsPpsCurrentFrame; + if(naluType == H264Utils::H264_NAL_TYPE_SEQ_PARAM) + { + nalTypeAfterSpsPpsCurrentFrame = H264Utils::getNalTypeAfterSpsPps(frame->data(), frame->size()); + } + if (mDirection && ((nalTypeAfterSpsPpsCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) || (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE))) { latestForwardGop.clear(); latestForwardGop.push_back(frame); } // dont buffer fwd GOP if I frame has not been recieved (possible in intra GOP direction change cases) - else if (mDirection 
&& !latestForwardGop.empty() && (H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_SEQ_PARAM || H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE)) + else if (mDirection && !latestForwardGop.empty() && (nalTypeAfterSpsPpsCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE || H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE)) { flushDecoderFlag = false; latestForwardGop.push_back(frame); diff --git a/base/src/H264Utils.cpp b/base/src/H264Utils.cpp index f9dc37288..23c72d21d 100644 --- a/base/src/H264Utils.cpp +++ b/base/src/H264Utils.cpp @@ -86,3 +86,28 @@ std::tuple H264Utils::parseNalu(const const_b typeFound = getNALUType(p1 + offset - 4); return { typeFound, const_buffer(), const_buffer() }; } + +H264Utils::H264_NAL_TYPE H264Utils::getNalTypeAfterSpsPps(void* frameData, size_t frameSize) +{ + char* p1 = reinterpret_cast(const_cast(frameData)); + size_t offset = 0; + auto typeFound = getNALUType(p1); + + if (typeFound == H264_NAL_TYPE::H264_NAL_TYPE_SEQ_PARAM) + { + if (getNALUnit(p1, frameSize, offset)) // where does it start + { + p1 = p1 + offset; + offset = 0; + + if (getNALUnit(p1, frameSize, offset)) // where does it end + { + p1 = p1 + offset; + if (getNALUnit(p1, frameSize, offset)) + { + typeFound = getNALUType(p1 + offset - 4); // always looks at 5th byte + } + } + } + } +} diff --git a/base/src/Model.cpp b/base/src/Model.cpp index 049204a29..fdf9d78cb 100644 --- a/base/src/Model.cpp +++ b/base/src/Model.cpp @@ -52,40 +52,6 @@ model_init (void) glBindTexture(GL_TEXTURE_2D, textureID); } -void saveTextureToTextFile(const char* filename, int width, int height) { - GLubyte* pixels = new GLubyte[width * height * 4]; // Assuming RGBA format - - // Read pixel data from the framebuffer (assuming the texture is bound to the framebuffer) - glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels); - - // 
Open the file for writing - std::ofstream outFile(filename, std::ios::out); - if (!outFile.is_open()) { - std::cerr << "Failed to open the file: " << filename << std::endl; - delete[] pixels; - return; - } - - // Write width and height to the file - outFile << width << " " << height << std::endl; - - // Write pixel values to the file - for (int y = 0; y < height; ++y) { - for (int x = 0; x < width; ++x) { - int index = (y * width + x) * 4; // Assuming RGBA format - outFile << static_cast(pixels[index]) << " "; - outFile << static_cast(pixels[index + 1]) << " "; - outFile << static_cast(pixels[index + 2]) << " "; - outFile << static_cast(pixels[index + 3]) << " "; // Alpha channel - } - outFile << std::endl; - } - - outFile.close(); - delete[] pixels; -} - - void matToTexture(unsigned char* buffer , GLenum minFilter, GLenum magFilter, GLenum wrapFilter, int width, int height) { // Catch silly-mistake texture interpolation method for magnification @@ -138,7 +104,6 @@ void matToTexture(unsigned char* buffer , GLenum minFilter, GLenum magFilter, GL // std::cout << "Will Generate minmap" << std::endl; glGenerateMipmap(GL_TEXTURE_2D); } - // saveTextureToTextFile("newvindows.png", 1280, 720); } void drawCameraFrame(void* frameData, int width, int height){ @@ -154,7 +119,7 @@ void drawCameraFrame(void* frameData, int width, int height){ // Draw all the triangles in the buffer: glBindVertexArray(vao); glDrawArrays(GL_QUADS, 0, 4); - } +} const float * diff --git a/base/src/Mp4ReaderSource.cpp b/base/src/Mp4ReaderSource.cpp index 90019a33e..c42550241 100644 --- a/base/src/Mp4ReaderSource.cpp +++ b/base/src/Mp4ReaderSource.cpp @@ -510,7 +510,10 @@ class Mp4ReaderDetailAbs { auto msg = "No Videotrack found in the video <" + mState.mVideoPath + ">"; LOG_ERROR << msg; - throw Mp4Exception(MP4_MISSING_VIDEOTRACK, msg); + std::string previousFile; + std::string nextFile; + cof->getPreviosAndNextFile(mState.mVideoPath, previousFile, nextFile); + throw 
Mp4ExceptionNoVideoTrack(MP4_MISSING_VIDEOTRACK, msg, previousFile, nextFile); } // starting timestamp of the video will either come from the video name or the header @@ -752,6 +755,17 @@ class Mp4ReaderDetailAbs } catch (Mp4_Exception& ex) { + if(ex.getCode() == MP4_MISSING_VIDEOTRACK) + { + if ((controlModule != nullptr)) + { + Mp4ErrorHandle cmd; + cmd.previousFile = ex.getPreviousFile(); + cmd.nextFile = ex.getNextFile(); + controlModule->queueCommand(cmd, true); + return false; + } + } makeAndSendMp4Error(Mp4ErrorFrame::MP4_SEEK, ex.getCode(), ex.getError(), ex.getOpenFileErrorCode(), skipTS); return false; } @@ -792,6 +806,17 @@ class Mp4ReaderDetailAbs } catch (Mp4_Exception& ex) { + if(ex.getCode() == MP4_MISSING_VIDEOTRACK) + { + if ((controlModule != nullptr)) + { + Mp4ErrorHandle cmd; + cmd.previousFile = ex.getPreviousFile(); + cmd.nextFile = ex.getNextFile(); + controlModule->queueCommand(cmd, true); + return; + } + } imgSize = 0; // send the last frame timestamp makeAndSendMp4Error(Mp4ErrorFrame::MP4_STEP, ex.getCode(), ex.getError(), ex.getOpenFileErrorCode(), mState.frameTSInMsecs); @@ -1055,6 +1080,7 @@ class Mp4ReaderDetailAbs std::string h264ImagePinId; std::string encodedImagePinId; std::string metadataFramePinId; + boost::shared_ptr controlModule = nullptr; }; class Mp4ReaderDetailJpeg : public Mp4ReaderDetailAbs @@ -1303,12 +1329,15 @@ bool Mp4ReaderDetailH264::produceFrames(frame_container& frames) { frameData[3] = 0x1; frameData[spsSize + 7] = 0x1; + frameData[spsSize + ppsSize + 8] = 0x0; frameData[spsSize + ppsSize + 9] = 0x0; frameData[spsSize + ppsSize + 10] = 0x0; frameData[spsSize + ppsSize + 11] = 0x1; } else { + frameData[0] = 0x0; + frameData[1] = 0x0; frameData[2] = 0x0; frameData[3] = 0x1; } @@ -1366,7 +1395,6 @@ Mp4ReaderSource::~Mp4ReaderSource() {} bool Mp4ReaderSource::init() { - LOG_ERROR<<"MP4READER INIT!!!!!!"; if (!Module::init()) { return false; @@ -1403,7 +1431,7 @@ bool Mp4ReaderSource::init() mDetail->encodedImagePinId 
= encodedImagePinId; mDetail->h264ImagePinId = h264ImagePinId; mDetail->metadataFramePinId = metadataFramePinId; - LOG_ERROR<<"MP4READER INIT ENNND!!!!!!"; + mDetail->controlModule = controlModule; return mDetail->Init(); } @@ -1559,9 +1587,9 @@ bool Mp4ReaderSource::handleCommand(Command::CommandType type, frame_sp& frame) { Mp4SeekCommand seekCmd; getCommand(seekCmd, frame); - LOG_ERROR<<"seek play 1 "; + //LOG_ERROR<<"seek play 1 "; return mDetail->randomSeek(seekCmd.seekStartTS, seekCmd.forceReopen); - LOG_ERROR<<"seek play 2 "; + //LOG_ERROR<<"seek play 2 "; } else { @@ -1571,10 +1599,10 @@ bool Mp4ReaderSource::handleCommand(Command::CommandType type, frame_sp& frame) bool Mp4ReaderSource::handlePausePlay(float speed, bool direction) { - LOG_ERROR<<"hanlde play 1 "; + //LOG_ERROR<<"hanlde play 1 "; mDetail->setPlayback(speed, direction); return Module::handlePausePlay(speed, direction); - LOG_ERROR<<"hanlde play 2 "; + //LOG_ERROR<<"hanlde play 2 "; } bool Mp4ReaderSource::randomSeek(uint64_t skipTS, bool forceReopen) diff --git a/base/src/OrderedCacheOfFiles.cpp b/base/src/OrderedCacheOfFiles.cpp index 91693cdbb..009b9f75f 100644 --- a/base/src/OrderedCacheOfFiles.cpp +++ b/base/src/OrderedCacheOfFiles.cpp @@ -152,6 +152,35 @@ bool OrderedCacheOfFiles::probe(boost::filesystem::path potentialMp4File, std::s return false; } +bool OrderedCacheOfFiles::getPreviosAndNextFile(std::string videoPath, std::string& previousFile, std::string& nextFile) +{ + auto videoIter = videoCache.find(videoPath); + videoIter++; + if (videoIter == videoCache.end()) + { + nextFile = ""; + videoIter--; + videoIter--; + if(videoIter == videoCache.end()) + { + previousFile = ""; + return false; + } + previousFile = videoIter->path; + return true; + } + nextFile = videoIter->path; + videoIter--; + videoIter--; + if (videoIter == videoCache.end()) + { + previousFile = ""; + return false; + } + previousFile = videoIter->path; + return true; +} + /* Important Note: **UNRELIABLE METHOD - 
Use ONLY if you know what you are doing.** diff --git a/base/test/rtsp_client_tests.cpp b/base/test/rtsp_client_tests.cpp index 6bc111593..c79a0b498 100644 --- a/base/test/rtsp_client_tests.cpp +++ b/base/test/rtsp_client_tests.cpp @@ -14,7 +14,7 @@ BOOST_AUTO_TEST_SUITE(rtsp_client_tests) struct rtsp_client_tests_data { rtsp_client_tests_data() { - outFile = string("./data/testOutput/bunny.h264"); + outFile = string("./data/testOutput/bunny_????.h264"); Test_Utils::FileCleaner fc; fc.pathsOfFiles.push_back(outFile); //clear any occurance before starting the tests } @@ -28,14 +28,14 @@ BOOST_AUTO_TEST_CASE(basic, *boost::unit_test::disabled()) //drop bunny/mp4 into evostream folder, //also set it up for RTSP client authentication as shown here: https://sites.google.com/apra.in/development/home/evostream/rtsp-authentication?authuser=1 - auto url=string("rtsp://user1:password1@127.0.0.1:5544/vod/mp4:bunny.mp4"); + auto url=string("rtsp://10.102.10.75/axis-media/media.amp?resolution=1280x720"); auto m = boost::shared_ptr(new RTSPClientSrc(RTSPClientSrcProps(url, d.empty, d.empty))); auto meta = framemetadata_sp(new H264Metadata()); m->addOutputPin(meta); //filewriter for saving output - auto fw = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps(d.outFile, true))); + auto fw = boost::shared_ptr(new FileWriterModule(FileWriterModuleProps(d.outFile))); m->setNext(fw); From 05bd236faf8c54d20aa2bd0fb2ee4bc3ed66ecdc Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Tue, 13 Feb 2024 19:37:55 +0530 Subject: [PATCH 16/19] Resolved plyback jitteriness issue --- base/CMakeLists.txt | 2 +- base/src/GtkGlRenderer.cpp | 28 ++++++++++++++++++---------- base/src/H264Decoder.cpp | 4 ++++ base/src/Mp4ReaderSource.cpp | 27 +++++++++++++++++++++------ base/src/MultimediaQueueXform.cpp | 4 ++++ 5 files changed, 48 insertions(+), 17 deletions(-) diff --git a/base/CMakeLists.txt b/base/CMakeLists.txt index 3332d8b3d..8c073342b 100755 --- a/base/CMakeLists.txt +++ 
b/base/CMakeLists.txt @@ -106,7 +106,7 @@ IF(ENABLE_CUDA) include_directories(AFTER SYSTEM /usr/include/gdk-pixbuf-2.0/) include_directories(AFTER SYSTEM /usr/local/cuda/samples/common/inc/) include_directories(AFTER SYSTEM /usr/include/) - include_directories(AFTER SYSTEM /mnt/disks/ssd/NVR/apranvr/thirdparty/ApraGTKUtils/includes/) + include_directories(AFTER SYSTEM ${CMAKE_CURRENT_SOURCE_DIR}/thirdparty/ApraGTKUtils/includes/) ELSEIF(ENABLE_LINUX) find_library(LIBNVCUVID libnvcuvid.so PATHS ../thirdparty/Video_Codec_SDK_10.0.26/Lib/linux/stubs/x86_64 NO_DEFAULT_PATH) find_library(LIBNVENCODE libnvidia-encode.so PATHS ../thirdparty/Video_Codec_SDK_10.0.26/Lib/linux/stubs/x86_64 NO_DEFAULT_PATH) diff --git a/base/src/GtkGlRenderer.cpp b/base/src/GtkGlRenderer.cpp index 953bcaebd..e2f68e325 100644 --- a/base/src/GtkGlRenderer.cpp +++ b/base/src/GtkGlRenderer.cpp @@ -270,17 +270,25 @@ bool GtkGlRenderer::init() } bool GtkGlRenderer::process(frame_container &frames) + { - for (const auto &pair : frames) - { - auto frame = pair.second; - if (!isFrameEmpty(frame)) - { - pushFrame(frame); - } - } - processQueue(); - + auto myId = Module::getId(); + // LOG_ERROR << "GOT " + auto frame = frames.cbegin()->second; + mDetail->cachedFrame = frame; + size_t underscorePos = myId.find('_'); + std::string numericPart = myId.substr(underscorePos + 1); + int myNumber = std::stoi(numericPart); + + if ((controlModule != nullptr) && (myNumber % 2 == 1)) + { + Rendertimestamp cmd; + auto myTime = frames.cbegin()->second->timestamp; + cmd.currentTimeStamp = myTime; + controlModule->queueCommand(cmd); + //LOG_ERROR << "myID is GtkGlRendererModule_ "<= 300) + { + flushQue(); + } auto frame = frames.begin()->second; auto myId = Module::getId(); // if (myId == "H264Decoder_42") diff --git a/base/src/Mp4ReaderSource.cpp b/base/src/Mp4ReaderSource.cpp index c42550241..ddc67d66b 100644 --- a/base/src/Mp4ReaderSource.cpp +++ b/base/src/Mp4ReaderSource.cpp @@ -17,7 +17,8 @@ class 
Mp4ReaderDetailAbs public: Mp4ReaderDetailAbs(Mp4ReaderSourceProps& props, std::function _makeFrame, std::function _makeFrameTrim, std::function _sendEOS, - std::function _setMetadata, std::function _sendMp4ErrorFrame) + std::function _setMetadata, std::function _sendMp4ErrorFrame, + std::function _setProps) { setProps(props); makeFrame = _makeFrame; @@ -25,6 +26,7 @@ class Mp4ReaderDetailAbs sendEOS = _sendEOS; mSetMetadata = _setMetadata; sendMp4ErrorFrame = _sendMp4ErrorFrame; + setMp4ReaderProps = _setProps; cof = boost::shared_ptr(new OrderedCacheOfFiles(mProps.skipDir)); } @@ -503,6 +505,13 @@ class Mp4ReaderDetailAbs mDirection = mState.direction; mDurationInSecs = mState.info.duration / mState.info.timescale; mFPS = mState.mFramesInVideo / mDurationInSecs; + if ((controlModule != nullptr)) + { + mProps.fps = mFPS; + LOG_INFO << "fps of new video is = " << mFPS; + setMp4ReaderProps(mProps); + LOG_INFO << "did set Mp4reader props"; + } } } @@ -1077,6 +1086,7 @@ class Mp4ReaderDetailAbs std::function makeFrameTrim; std::function sendMp4ErrorFrame; std::function mSetMetadata; + std::function setMp4ReaderProps; std::string h264ImagePinId; std::string encodedImagePinId; std::string metadataFramePinId; @@ -1087,7 +1097,7 @@ class Mp4ReaderDetailJpeg : public Mp4ReaderDetailAbs { public: Mp4ReaderDetailJpeg(Mp4ReaderSourceProps& props, std::function _makeFrame, - std::function _makeFrameTrim, std::function _sendEOS, std::function _setMetadata, std::function _sendMp4ErrorFrame) : Mp4ReaderDetailAbs(props, _makeFrame, _makeFrameTrim, _sendEOS, _setMetadata, _sendMp4ErrorFrame) + std::function _makeFrameTrim, std::function _sendEOS, std::function _setMetadata, std::function _sendMp4ErrorFrame, std::function _setProps) : Mp4ReaderDetailAbs(props, _makeFrame, _makeFrameTrim, _sendEOS, _setMetadata, _sendMp4ErrorFrame, _setProps) {} ~Mp4ReaderDetailJpeg() {} void setMetadata(); @@ -1100,7 +1110,7 @@ class Mp4ReaderDetailH264 : public Mp4ReaderDetailAbs { public: 
Mp4ReaderDetailH264(Mp4ReaderSourceProps& props, std::function _makeFrame, - std::function _makeFrameTrim, std::function _sendEOS, std::function _setMetadata, std::function _sendMp4ErrorFrame) : Mp4ReaderDetailAbs(props, _makeFrame, _makeFrameTrim, _sendEOS, _setMetadata, _sendMp4ErrorFrame) + std::function _makeFrameTrim, std::function _sendEOS, std::function _setMetadata, std::function _sendMp4ErrorFrame, std::function _setProps) : Mp4ReaderDetailAbs(props, _makeFrame, _makeFrameTrim, _sendEOS, _setMetadata, _sendMp4ErrorFrame, _setProps) {} ~Mp4ReaderDetailH264() {} void setMetadata(); @@ -1413,7 +1423,9 @@ bool Mp4ReaderSource::init() {return Module::sendEOS(frame); }, [&](std::string& pinId, framemetadata_sp& metadata) { return setImageMetadata(pinId, metadata); }, - [&](frame_sp& frame) {return Module::sendMp4ErrorFrame(frame); })); + [&](frame_sp& frame) {return Module::sendMp4ErrorFrame(frame); }, + [&](Mp4ReaderSourceProps& props) + {return setProps(props); })); } else if (mFrameType == FrameMetadata::FrameType::H264_DATA) { @@ -1426,7 +1438,10 @@ bool Mp4ReaderSource::init() {return Module::sendEOS(frame); }, [&](std::string& pinId, framemetadata_sp& metadata) { return setImageMetadata(pinId, metadata); }, - [&](frame_sp& frame) {return Module::sendMp4ErrorFrame(frame); })); + [&](frame_sp& frame) + {return Module::sendMp4ErrorFrame(frame); }, + [&](Mp4ReaderSourceProps& props) + {return setProps(props); })); } mDetail->encodedImagePinId = encodedImagePinId; mDetail->h264ImagePinId = h264ImagePinId; @@ -1572,7 +1587,7 @@ bool Mp4ReaderSource::handlePropsChange(frame_sp& frame) void Mp4ReaderSource::setProps(Mp4ReaderSourceProps& props) { - Module::addPropsToQueue(props); + Module::addPropsToQueue(props, true); } bool Mp4ReaderSource::changePlayback(float speed, bool direction) diff --git a/base/src/MultimediaQueueXform.cpp b/base/src/MultimediaQueueXform.cpp index ace947505..8c58e2724 100644 --- a/base/src/MultimediaQueueXform.cpp +++ 
b/base/src/MultimediaQueueXform.cpp @@ -734,6 +734,10 @@ bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& fr } if (direction) { + if(it == mState->queueObject->mQueue.end()) + { + break; + } it++; if (it == mState->queueObject->mQueue.end()) { From d49127c39456a0e91a1984ddfc768c2507ac5b6b Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Tue, 13 Feb 2024 19:55:23 +0530 Subject: [PATCH 17/19] increased decoder buffered frames limit --- base/src/H264Decoder.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/base/src/H264Decoder.cpp b/base/src/H264Decoder.cpp index 85fc569e9..5cdbd7f51 100644 --- a/base/src/H264Decoder.cpp +++ b/base/src/H264Decoder.cpp @@ -363,7 +363,7 @@ void H264Decoder::saveSpsPps(frame_sp frame) bool H264Decoder::process(frame_container& frames) { - if(incomingFramesTSQ.size() >= 300) + if(incomingFramesTSQ.size() >= 1000) { flushQue(); } From e87b541d6c57f6167be6c1a9144cc4c3b96e094e Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Wed, 14 Feb 2024 19:53:35 +0530 Subject: [PATCH 18/19] Resolved decoder race conditions --- base/include/H264Decoder.h | 1 + base/src/H264Decoder.cpp | 30 ++++++++++++++++++++++++++++++ base/src/H264DecoderV4L2Helper.cpp | 2 +- base/src/H264DecoderV4L2Helper.h | 2 +- 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/base/include/H264Decoder.h b/base/include/H264Decoder.h index 2e8b2781a..494768d25 100644 --- a/base/include/H264Decoder.h +++ b/base/include/H264Decoder.h @@ -73,4 +73,5 @@ class H264Decoder : public Module frame_sp mHeaderFrame; boost::asio::const_buffer spsBuffer; boost::asio::const_buffer ppsBuffer; + std::mutex m; }; diff --git a/base/src/H264Decoder.cpp b/base/src/H264Decoder.cpp index 5cdbd7f51..882fc96ab 100644 --- a/base/src/H264Decoder.cpp +++ b/base/src/H264Decoder.cpp @@ -184,6 +184,7 @@ void* H264Decoder::prependSpsPps(frame_sp& iFrame, size_t& spsPpsFrameSize) void H264Decoder::clearIncompleteBwdGopTsFromIncomingTSQ(std::deque& 
latestGop) { + m.lock(); while (!latestGop.empty() && !incomingFramesTSQ.empty()) { auto deleteItr = std::find(incomingFramesTSQ.begin(), incomingFramesTSQ.end(), latestGop.front()->timestamp); @@ -193,6 +194,7 @@ void H264Decoder::clearIncompleteBwdGopTsFromIncomingTSQ(std::deque& l latestGop.pop_front(); } } + m.unlock(); } void H264Decoder::bufferBackwardEncodedFrames(frame_sp& frame, short naluType) @@ -213,7 +215,9 @@ void H264Decoder::bufferBackwardEncodedFrames(frame_sp& frame, short naluType) if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || nalTypeAfterSpsPps == H264Utils::H264_NAL_TYPE_IDR_SLICE) { foundIFrameOfReverseGop = true; + m.lock(); backwardGopBuffer.push_back(std::move(latestBackwardGop)); + m.unlock(); } } @@ -250,12 +254,15 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal // Corner case: Forward :- current frame is not part of latestForwardGOP if (latestForwardGop.front()->timestamp > frame->timestamp) { + m.lock(); latestForwardGop.clear(); + m.unlock(); } } // Corner case: Forward:- When end of cache hits while in the middle of gop, before decoding the next P frame we need decode the previous frames of that GOP. 
// There might be a case where we might have cleared the decoder, in order to start the decoder again we must prepend sps and pps to I frame if not present + m.lock(); if (!latestForwardGop.empty() && naluTypeOfForwardGopFirstFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) { auto iFrame = latestForwardGop.front(); @@ -281,6 +288,7 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal } } } + m.unlock(); } } prevFrameInCache = false; @@ -294,9 +302,13 @@ void H264Decoder::bufferAndDecodeForwardEncodedFrames(frame_sp& frame, short nal } if (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE || nalTypeAfterSpsPpsOfCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) { + m.lock(); latestForwardGop.clear(); + m.unlock(); } + m.lock(); latestForwardGop.emplace_back(frame); + m.unlock(); // If direction changed to forward in the middle of GOP (Even the latest gop of backward was half and not decoded) , Then we drop the P frames until next I frame. // We also remove the entries of P frames from the incomingFramesTSQ. 
@@ -320,13 +332,18 @@ void H264Decoder::decodeFrameFromBwdGOP() { if (!backwardGopBuffer.empty() && H264Utils::getNALUType((char*)backwardGopBuffer.front().back()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE && prevFrameInCache) { + m.lock(); auto iFrame = backwardGopBuffer.front().back(); + m.unlock(); size_t spsPpsFrameSize; auto spsPpsFrameBuffer = prependSpsPps(iFrame, spsPpsFrameSize); mDetail->compute(spsPpsFrameBuffer, spsPpsFrameSize, iFrame->timestamp); + m.lock(); backwardGopBuffer.front().pop_back(); + m.unlock(); prevFrameInCache = false; } + m.lock(); if (!backwardGopBuffer.empty() && !backwardGopBuffer.front().empty()) { // For reverse play we sent the frames to the decoder in reverse, As the last frame added in the deque should be sent first (Example : P,P,P,P,P,P,I) @@ -338,6 +355,7 @@ void H264Decoder::decodeFrameFromBwdGOP() { backwardGopBuffer.pop_front(); } + m.unlock(); if (backwardGopBuffer.empty()) { foundIFrameOfReverseGop = false; @@ -394,7 +412,9 @@ bool H264Decoder::process(frame_container& frames) We dont clear backwardGOP because there might be a left over GOP to be decoded. 
*/ if (h264Metadata->mp4Seek) { + m.lock(); latestForwardGop.clear(); + m.unlock(); } mDirection = h264Metadata->direction; @@ -418,11 +438,15 @@ bool H264Decoder::process(frame_container& frames) //prepend sps and pps if 1st frame is I frame if (!backwardGopBuffer.empty() && H264Utils::getNALUType((char*)backwardGopBuffer.front().back()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE) { + m.lock(); auto iFrame = backwardGopBuffer.front().back(); + m.unlock(); size_t spsPpsFrameSize; auto spsPpsFrameBuffer = prependSpsPps(iFrame, spsPpsFrameSize); mDetail->compute(spsPpsFrameBuffer, spsPpsFrameSize, iFrame->timestamp); + m.lock(); backwardGopBuffer.front().pop_back(); + m.unlock(); } // the buffered GOPs in bwdGOPBuffer needs to need to be processed first while (!backwardGopBuffer.empty()) @@ -461,14 +485,18 @@ bool H264Decoder::process(frame_container& frames) } if (mDirection && ((nalTypeAfterSpsPpsCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE) || (naluType == H264Utils::H264_NAL_TYPE_IDR_SLICE))) { + m.lock(); latestForwardGop.clear(); latestForwardGop.push_back(frame); + m.unlock(); } // dont buffer fwd GOP if I frame has not been recieved (possible in intra GOP direction change cases) else if (mDirection && !latestForwardGop.empty() && (nalTypeAfterSpsPpsCurrentFrame == H264Utils::H264_NAL_TYPE_IDR_SLICE || H264Utils::getNALUType((char*)latestForwardGop.front()->data()) == H264Utils::H264_NAL_TYPE_IDR_SLICE)) { + m.lock(); flushDecoderFlag = false; latestForwardGop.push_back(frame); + m.unlock(); } // While in forward play, if cache has resumed in the middle of the GOP then to get the previous few frames we need to flush the decoder. 
@@ -641,11 +669,13 @@ void H264Decoder::flushQue() { if (!incomingFramesTSQ.empty()) { + m.lock(); LOG_ERROR << "clearing decoder cache and clear ts = " << incomingFramesTSQ.size(); incomingFramesTSQ.clear(); latestBackwardGop.clear(); latestForwardGop.clear(); backwardGopBuffer.clear(); + m.unlock(); auto frame = frame_sp(new EmptyFrame()); LOG_ERROR << "does it compute"; mDetail->compute(frame->data(), frame->size(), frame->timestamp); diff --git a/base/src/H264DecoderV4L2Helper.cpp b/base/src/H264DecoderV4L2Helper.cpp index d976464ac..a420ee8ac 100644 --- a/base/src/H264DecoderV4L2Helper.cpp +++ b/base/src/H264DecoderV4L2Helper.cpp @@ -647,7 +647,7 @@ void * h264DecoderV4L2Helper::capture_thread(void *arg) /* Check for resolution event to again ** set format and buffers on capture plane. */ - while (!(ctx->in_error || ctx->got_eos)) + while (!(ctx->in_error || ctx->got_eos) || ctx->in_error) { Buffer *decoded_buffer = new Buffer(ctx->cp_buf_type, ctx->cp_mem_type, 0); ret_val = m_nThread->dq_event(ctx, event, 0); diff --git a/base/src/H264DecoderV4L2Helper.h b/base/src/H264DecoderV4L2Helper.h index 3a556b06b..0b9586745 100644 --- a/base/src/H264DecoderV4L2Helper.h +++ b/base/src/H264DecoderV4L2Helper.h @@ -175,7 +175,7 @@ class h264DecoderV4L2Helper pthread_cond_t queue_cond; pthread_t dec_capture_thread; - bool in_error; + bool in_error = false; bool eos; bool got_eos; bool op_streamon; From ae9284f637237243defc236e1aa169969d8ca54d Mon Sep 17 00:00:00 2001 From: Vinayak-YB Date: Mon, 19 Feb 2024 15:50:51 +0530 Subject: [PATCH 19/19] mmq fps issue fix --- base/include/MultimediaQueueXform.h | 5 ++++- base/include/RTSPClientSrc.h | 2 ++ base/src/MultimediaQueueXform.cpp | 9 +++++++-- base/src/RTSPClientSrc.cpp | 26 +++++++++++++++++++++++++- 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/base/include/MultimediaQueueXform.h b/base/include/MultimediaQueueXform.h index 289baadb5..c68980e65 100644 --- a/base/include/MultimediaQueueXform.h +++ 
b/base/include/MultimediaQueueXform.h @@ -13,16 +13,18 @@ class MultimediaQueueXformProps : public ModuleProps upperWaterMark = 15000; isMapDelayInTime = true; } - MultimediaQueueXformProps(uint32_t queueLength = 10000, uint16_t tolerance = 5000, bool _isDelayTime = true) + MultimediaQueueXformProps(uint32_t queueLength = 10000, uint16_t tolerance = 5000, int _mmqFps = 24, bool _isDelayTime = true) { lowerWaterMark = queueLength; upperWaterMark = queueLength + tolerance; isMapDelayInTime = _isDelayTime; + mmqFps = _mmqFps; } uint32_t lowerWaterMark; // Length of multimedia queue in terms of time or number of frames uint32_t upperWaterMark; //Length of the multimedia queue when the next module queue is full bool isMapDelayInTime; + int mmqFps; }; class State; @@ -46,6 +48,7 @@ class MultimediaQueueXform : public Module { MultimediaQueueXformProps mProps; boost::shared_ptr getQue(); void extractFramesAndEnqueue(boost::shared_ptr& FrameQueue); + void setMmqFps(int fps); protected: bool process(frame_container& frames); bool validateInputPins(); diff --git a/base/include/RTSPClientSrc.h b/base/include/RTSPClientSrc.h index d857e70bc..b8825856c 100644 --- a/base/include/RTSPClientSrc.h +++ b/base/include/RTSPClientSrc.h @@ -1,6 +1,7 @@ #pragma once #include #include "Module.h" +#include using namespace std; @@ -44,6 +45,7 @@ class RTSPClientSrc : public Module { bool init(); bool term(); void setProps(RTSPClientSrcProps& props); + int getCurrentFps(); RTSPClientSrcProps getProps(); protected: diff --git a/base/src/MultimediaQueueXform.cpp b/base/src/MultimediaQueueXform.cpp index 8c58e2724..8c04bf455 100644 --- a/base/src/MultimediaQueueXform.cpp +++ b/base/src/MultimediaQueueXform.cpp @@ -566,7 +566,7 @@ bool MultimediaQueueXform::init() } } mState.reset(new Idle(mState->queueObject)); - myTargetFrameLen = std::chrono::nanoseconds(1000000000 / 22); + myTargetFrameLen = std::chrono::nanoseconds(1000000000 / mProps.mmqFps); return true; } @@ -659,7 +659,7 @@ 
boost::shared_ptr MultimediaQueueXform::getQue() bool MultimediaQueueXform::handleCommand(Command::CommandType type, frame_sp& frame) { - myTargetFrameLen = std::chrono::nanoseconds(1000000000 / 22); + myTargetFrameLen = std::chrono::nanoseconds(1000000000 / mProps.mmqFps); initDone = false; LOG_ERROR << "command received"; if (type == Command::CommandType::MultimediaQueueXform) @@ -954,6 +954,11 @@ bool MultimediaQueueXform::process(frame_container& frames) return true; } +void MultimediaQueueXform::setMmqFps(int fps) +{ + mProps.mmqFps = fps; +} + bool MultimediaQueueXform::handlePropsChange(frame_sp& frame) { if (mState->Type != State::EXPORT) diff --git a/base/src/RTSPClientSrc.cpp b/base/src/RTSPClientSrc.cpp index e582ce5f3..c5fca7213 100644 --- a/base/src/RTSPClientSrc.cpp +++ b/base/src/RTSPClientSrc.cpp @@ -128,6 +128,12 @@ class RTSPClientSrc::Detail bool readBuffer() { + if(!initDone) + { + std::chrono::time_point t = std::chrono::system_clock::now(); + beginTs = std::chrono::duration_cast(t.time_since_epoch()); + initDone = true; + } frame_container outFrames; bool got_something = false; while(!got_something) @@ -176,17 +182,27 @@ class RTSPClientSrc::Detail { LOG_WARNING << "oops! 
there is already another packet for pin " << it->second; } + auto diff = dur - beginTs; + if(diff.count() > 1000) + { + currentCameraFps = frameCount; + frameCount = 0; + beginTs = dur; + } + frameCount++; } av_packet_unref(&packet); } } + if(outFrames.size()>0) myModule->send(outFrames); return true; } bool isConncected() const { return bConnected; } - + int frameCount = 0; + int currentCameraFps = 0; private: AVPacket packet; AVFormatContext* pFormatCtx = nullptr; @@ -196,6 +212,8 @@ class RTSPClientSrc::Detail bool bUseTCP; std::map streamsMap; RTSPClientSrc* myModule; + std::chrono::milliseconds beginTs; + bool initDone = false; }; RTSPClientSrc::RTSPClientSrc(RTSPClientSrcProps _props) : Module(SOURCE, "RTSPClientSrc", _props), mProps(_props) @@ -240,4 +258,10 @@ bool RTSPClientSrc::handleCommand(Command::CommandType type, frame_sp& frame) } return true; } + +int RTSPClientSrc::getCurrentFps() +{ + return mDetail->currentCameraFps; +} + bool RTSPClientSrc::handlePropsChange(frame_sp& frame) { return true; }