diff --git a/include/Clip.h b/include/Clip.h index dc33223db..96d9a8130 100644 --- a/include/Clip.h +++ b/include/Clip.h @@ -38,8 +38,7 @@ #include #undef uint64 #undef int64 - - // #include "CVStabilization.h" + #endif #include @@ -161,14 +160,6 @@ namespace openshot { openshot::FrameDisplayType display; ///< The format to display the frame number (if any) openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips - // #ifdef USE_OPENCV - // /// Smoothed transformation for all the clip frames - // std::vector new_prev_to_cur_transform; - // /// apply the smoothed transformation warp when retrieving a frame - // bool hasStabilization = false; - // void apply_stabilization(std::shared_ptr f, int64_t frame_number); - // #endif - /// Default Constructor Clip(); @@ -232,7 +223,7 @@ namespace openshot { void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip /// Stabilize the clip using opencv and opticalflow - void stabilize_video(); + //void stabilize_video(); // Scale, Location, and Alpha curves openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1) diff --git a/include/ClipProcessingJobs.h b/include/ClipProcessingJobs.h index d1833ebab..7887e1a38 100644 --- a/include/ClipProcessingJobs.h +++ b/include/ClipProcessingJobs.h @@ -42,8 +42,6 @@ #endif #include "Clip.h" -#include "effects/Tracker.h" -#include "effects/Stabilizer.h" using namespace openshot; @@ -51,14 +49,21 @@ using namespace openshot; class ClipProcessingJobs{ private: + int processingProgress; + bool processingDone = false; + bool stopProcessing = false; + + public: + ClipProcessingJobs(std::string processingType, Clip& videoClip); + + int GetProgress(); + + void CancelProcessing(); // Apply object tracking to clip - void trackVideo(Clip& videoClip); + std::string trackVideo(Clip& videoClip); // Apply stabilization to clip - void stabilizeVideo(Clip& video); - - public: - 
ClipProcessingJobs(std::string processingType, Clip& videoClip); + std::string stabilizeVideo(Clip& videoClip); diff --git a/include/EffectInfo.h b/include/EffectInfo.h index 529063899..901e4dba8 100644 --- a/include/EffectInfo.h +++ b/include/EffectInfo.h @@ -49,10 +49,7 @@ namespace openshot public: // Create an instance of an effect (factory style) EffectBase* CreateEffect(std::string effect_type); - - // Create an instance of an video effect (require processing the whole clip) - EffectBase* CreateEffect(std::string effect_type, Clip* video_clip); - + EffectBase* CreateEffect(std::string effect_type, std::string pb_data_path); /// JSON methods static std::string Json(); ///< Generate JSON string of this object diff --git a/include/Effects.h b/include/Effects.h index 746da4c0f..5773ae07e 100644 --- a/include/Effects.h +++ b/include/Effects.h @@ -43,8 +43,10 @@ #include "effects/Mask.h" #include "effects/Negate.h" #include "effects/Pixelate.h" +#include "effects/Stabilizer.h" #include "effects/Saturation.h" #include "effects/Shift.h" +#include "effects/Tracker.h" #include "effects/Wave.h" diff --git a/include/effects/Stabilizer.h b/include/effects/Stabilizer.h index 029168482..4c1a40a7f 100644 --- a/include/effects/Stabilizer.h +++ b/include/effects/Stabilizer.h @@ -33,16 +33,47 @@ #include "../EffectBase.h" +#include + #include #include #include #include "../Color.h" #include "../Json.h" #include "../KeyFrame.h" -#include "../CVStabilization.h" -#include "../Clip.h" #include "../stabilizedata.pb.h" +using namespace std; +using google::protobuf::util::TimeUtil; + +struct EffectTransformParam +{ + EffectTransformParam() {} + EffectTransformParam(double _dx, double _dy, double _da) { + dx = _dx; + dy = _dy; + da = _da; + } + + double dx; + double dy; + double da; // angle +}; + +struct EffectCamTrajectory +{ + EffectCamTrajectory() {} + EffectCamTrajectory(double _x, double _y, double _a) { + x = _x; + y = _y; + a = _a; + } + + double x; + double y; + double a; // 
angle +}; + namespace openshot { @@ -58,11 +89,12 @@ namespace openshot private: /// Init effect settings void init_effect_details(); - + std::string protobuf_data_path; public: - std::map trajectoryData; // Save camera trajectory data - std::map transformationData; // Save transormation data + std::string teste; + std::map trajectoryData; // Save camera trajectory data + std::map transformationData; // Save transormation data /// Blank constructor, useful when using Json to load the effect properties Stabilizer(std::string clipTrackerDataPath); diff --git a/include/effects/Tracker.h b/include/effects/Tracker.h index a84f4e758..b9bde1005 100644 --- a/include/effects/Tracker.h +++ b/include/effects/Tracker.h @@ -33,16 +33,48 @@ #include "../EffectBase.h" +#include + #include +#include #include #include #include "../Color.h" #include "../Json.h" #include "../KeyFrame.h" -#include "../CVTracker.h" -#include "../Clip.h" #include "../trackerdata.pb.h" +using namespace std; +using google::protobuf::util::TimeUtil; + + +// Tracking info struct +struct EffectFrameData{ + int frame_id = -1; + float rotation = 0; + int x1 = -1; + int y1 = -1; + int x2 = -1; + int y2 = -1; + + // Constructors + EffectFrameData() + {} + + EffectFrameData( int _frame_id) + {frame_id = _frame_id;} + + EffectFrameData( int _frame_id , float _rotation, int _x1, int _y1, int _x2, int _y2) + { + frame_id = _frame_id; + rotation = _rotation; + x1 = _x1; + y1 = _y1; + x2 = _x2; + y2 = _y2; + } +}; + namespace openshot { @@ -61,7 +93,7 @@ namespace openshot public: - std::map trackedDataById; // Save object tracking box data + std::map trackedDataById; // Save object tracking box data /// Blank constructor, useful when using Json to load the effect properties Tracker(std::string clipTrackerDataPath); @@ -84,7 +116,7 @@ namespace openshot bool LoadTrackedData(std::string inputFilePath); // Get tracker info for the desired frame - FrameData GetTrackedData(int frameId); + EffectFrameData 
GetTrackedData(int frameId); /// Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object diff --git a/src/Clip.cpp b/src/Clip.cpp index 09c3586e7..5e9cc9692 100644 --- a/src/Clip.cpp +++ b/src/Clip.cpp @@ -360,12 +360,6 @@ std::shared_ptr Clip::GetFrame(int64_t requested_frame) // Apply effects to the frame (if any) apply_effects(frame); -// #ifdef USE_OPENCV -// if(hasStabilization){ -// apply_stabilization(frame, requested_frame); -// } -// #endif - // Return processed 'frame' return frame; } @@ -374,33 +368,6 @@ std::shared_ptr Clip::GetFrame(int64_t requested_frame) throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method."); } -// #ifdef USE_OPENCV -// void Clip::apply_stabilization(std::shared_ptr f, int64_t frame_number){ -// cv::Mat T(2,3,CV_64F); - -// // Grab Mat image -// cv::Mat cur = f->GetImageCV(); - -// T.at(0,0) = cos(new_prev_to_cur_transform[frame_number].da); -// T.at(0,1) = -sin(new_prev_to_cur_transform[frame_number].da); -// T.at(1,0) = sin(new_prev_to_cur_transform[frame_number].da); -// T.at(1,1) = cos(new_prev_to_cur_transform[frame_number].da); - -// T.at(0,2) = new_prev_to_cur_transform[frame_number].dx; -// T.at(1,2) = new_prev_to_cur_transform[frame_number].dy; - -// cv::Mat frame_stabilized; - -// cv::warpAffine(cur, frame_stabilized, T, cur.size()); - -// // Scale up the image to remove black borders -// cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04); -// cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size()); - -// f->SetImageCV(frame_stabilized); -// } -// #endif - // Get file extension std::string Clip::get_file_extension(std::string path) { @@ -945,16 +912,30 @@ void Clip::SetJsonValue(const Json::Value root) { for (const auto existing_effect : root["effects"]) { // Create Effect EffectBase *e = NULL; - if 
(!existing_effect["type"].isNull()) { - // Create instance of effect - if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + std::vector pEffects{"Stabilizer", "Tracker"}; + std::string effectName = existing_effect["type"].asString(); + + if(std::find(pEffects.begin(), pEffects.end(), effectName) == pEffects.end()){ + // Create instance of effect + if ( (e = EffectInfo().CreateEffect(effectName))) { + + // Load Json into Effect + e->SetJsonValue(existing_effect); + + // Add Effect to Timeline + AddEffect(e); + } + } + else{ + if ( (e = EffectInfo().CreateEffect(effectName, existing_effect["protobuf_data_path"].asString()))) { + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -1075,45 +1056,3 @@ std::shared_ptr Clip::apply_effects(std::shared_ptr frame) // Return modified frame return frame; } - -// #ifdef USE_OPENCV -// void Clip::stabilize_video(){ -// // create CVStabilization object -// CVStabilization stabilizer; - -// // Make sure Clip is opened -// Open(); -// // Get total number of frames -// int videoLenght = Reader()->info.video_length; - -// // Get first Opencv image -// // std::shared_ptr f = GetFrame(0); -// // cv::Mat prev = f->GetImageCV(); -// // // OpticalFlow works with grayscale images -// // cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY); - -// // Extract and track opticalflow features for each frame -// for (long int frame_number = 0; frame_number <= videoLenght; frame_number++) -// { -// std::shared_ptr f = GetFrame(frame_number); - -// // Grab Mat image -// cv::Mat cvimage = f->GetImageCV(); -// cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY); -// stabilizer.TrackFrameFeatures(cvimage, frame_number); -// } - -// vector trajectory = stabilizer.ComputeFramesTrajectory(); - -// vector smoothed_trajectory = 
stabilizer.SmoothTrajectory(trajectory); - -// // Get the smoothed trajectory -// new_prev_to_cur_transform = stabilizer.GenNewCamPosition(smoothed_trajectory); -// // Will apply the smoothed transformation warp when retrieving a frame -// hasStabilization = true; -// } -// #else -// void Clip::stabilize_video(){ -// throw "Please compile libopenshot with OpenCV to use this feature"; -// } -// #endif diff --git a/src/ClipProcessingJobs.cpp b/src/ClipProcessingJobs.cpp index 505a44765..4c74a16a5 100644 --- a/src/ClipProcessingJobs.cpp +++ b/src/ClipProcessingJobs.cpp @@ -3,16 +3,19 @@ // Constructor responsible to choose processing type and apply to clip ClipProcessingJobs::ClipProcessingJobs(std::string processingType, Clip& videoClip){ - if(processingType == "Stabilize"){ - stabilizeVideo(videoClip); - } - if(processingType == "Track") - trackVideo(videoClip); + // if(processingType == "Stabilize"){ + // std::cout<<"Stabilize"; + // stabilizeVideo(videoClip); + // } + // if(processingType == "Track"){ + // std::cout<<"Track"; + // trackVideo(videoClip); + // } } // Apply object tracking to clip -void ClipProcessingJobs::trackVideo(Clip& videoClip){ +std::string ClipProcessingJobs::trackVideo(Clip& videoClip){ // Opencv display window cv::namedWindow("Display Image", cv::WINDOW_NORMAL ); @@ -23,28 +26,32 @@ void ClipProcessingJobs::trackVideo(Clip& videoClip){ tracker.trackClip(videoClip); // Save tracking data - tracker.SaveTrackedData("kcf_tracker.data"); + tracker.SaveTrackedData("kcf_tracker.data"); - // Create new Tracker Effect - EffectBase* trackerEffect = new Tracker("kcf_tracker.data"); - // Apply Tracker Effect to clip - videoClip.AddEffect(trackerEffect); + // Return path to protobuf saved data + return "kcf_tracker.data"; } // Apply stabilization to clip -void ClipProcessingJobs::stabilizeVideo(Clip& videoClip){ +std::string ClipProcessingJobs::stabilizeVideo(Clip& videoClip){ // 
create CVStabilization object CVStabilization stabilizer; // Start stabilization process stabilizer.ProcessClip(videoClip); // Save stabilization data - stabilizer.SaveStabilizedData("stabilization.data"); + stabilizer.SaveStabilizedData("stabilization.data"); + + // Return path to protobuf saved data + return "stabilization.data"; +} - // Create new Stabilizer Effect - EffectBase* stabilizeEffect = new Stabilizer("stabilization.data"); - // Apply Stabilizer Effect to clip - videoClip.AddEffect(stabilizeEffect); +int ClipProcessingJobs::GetProgress(){ + return processingProgress; +} -} \ No newline at end of file + + void ClipProcessingJobs::CancelProcessing(){ + stopProcessing = true; +} diff --git a/src/EffectInfo.cpp b/src/EffectInfo.cpp index d2c0b30c8..e169217f8 100644 --- a/src/EffectInfo.cpp +++ b/src/EffectInfo.cpp @@ -29,7 +29,6 @@ */ #include "../include/EffectInfo.h" -// #include "../include/Clip.h" using namespace openshot; @@ -86,9 +85,24 @@ EffectBase* EffectInfo::CreateEffect(std::string effect_type) { else if (effect_type == "Wave") return new Wave(); + + else if(effect_type == "Stabilizer") + return new Stabilizer(); + + else if(effect_type == "Tracker") + return new Tracker(); + return NULL; } +EffectBase* EffectInfo::CreateEffect(std::string effect_type, std::string pb_data_path){ + if(effect_type == "Stabilizer") + return new Stabilizer(pb_data_path); + else if(effect_type == "Tracker") + return new Tracker(pb_data_path); + return NULL; +} + // Generate Json::Value for this object Json::Value EffectInfo::JsonValue() { @@ -110,6 +124,8 @@ Json::Value EffectInfo::JsonValue() { root.append(Saturation().JsonInfo()); root.append(Shift().JsonInfo()); root.append(Wave().JsonInfo()); + root.append(Stabilizer().JsonInfo()); + root.append(Tracker().JsonInfo()); // return JsonValue return root; diff --git a/src/Frame.cpp b/src/Frame.cpp index 68760e44c..c623ed48e 100644 --- a/src/Frame.cpp +++ b/src/Frame.cpp
@@ -953,7 +953,7 @@ cv::Mat Frame::GetImageCV() } std::shared_ptr Frame::Mat2Qimage(cv::Mat img){ - // cv::cvtColor(img, img, cv::COLOR_BGR2RGB); + cv::cvtColor(img, img, cv::COLOR_BGR2RGB); std::shared_ptr imgIn = std::shared_ptr(new QImage((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888)); // Always convert to RGBA8888 (if different) if (imgIn->format() != QImage::Format_RGBA8888) diff --git a/src/bindings/python/openshot.i b/src/bindings/python/openshot.i index 53e514c15..bbe4196aa 100644 --- a/src/bindings/python/openshot.i +++ b/src/bindings/python/openshot.i @@ -206,6 +206,7 @@ %include "Timeline.h" %include "ZmqLogger.h" %include "AudioDeviceInfo.h" +%include "ClipProcessingJobs.h" #ifdef USE_IMAGEMAGICK %include "ImageReader.h" @@ -228,6 +229,8 @@ %include "effects/Saturation.h" %include "effects/Shift.h" %include "effects/Wave.h" +%include "effects/Stabilizer.h" +%include "effects/Tracker.h" /* Wrap std templates (list, vector, etc...) */ diff --git a/src/effects/Stabilizer.cpp b/src/effects/Stabilizer.cpp index cb367f41d..2a61b25c1 100644 --- a/src/effects/Stabilizer.cpp +++ b/src/effects/Stabilizer.cpp @@ -33,11 +33,10 @@ using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -Stabilizer::Stabilizer(std::string clipStabilizedDataPath) +Stabilizer::Stabilizer(std::string clipStabilizedDataPath):protobuf_data_path(clipStabilizedDataPath) { // Init effect properties init_effect_details(); - // Tries to load the stabilization data from protobuf LoadStabilizedData(clipStabilizedDataPath); } @@ -47,11 +46,13 @@ Stabilizer::Stabilizer() { // Init effect properties init_effect_details(); + // LoadStabilizedData("/home/gustavostahl/LabVisao/VideoEditor/openshot-qt/stabilization.data"); } // Init effect settings void Stabilizer::init_effect_details() { + /// Initialize the values of the EffectInfo struct. 
InitEffectInfo(); @@ -61,42 +62,54 @@ void Stabilizer::init_effect_details() info.description = "Stabilize video clip to remove undesired shaking and jitter."; info.has_audio = false; info.has_video = true; + } // This method is required for all derived classes of EffectBase, and returns a // modified openshot::Frame object std::shared_ptr Stabilizer::GetFrame(std::shared_ptr frame, int64_t frame_number) { - // Create empty rotation matrix - cv::Mat T(2,3,CV_64F); - // Grab OpenCV Mat image - cv::Mat cur = frame->GetImageCV(); - - // Set rotation matrix values - T.at(0,0) = cos(transformationData[frame_number].da); - T.at(0,1) = -sin(transformationData[frame_number].da); - T.at(1,0) = sin(transformationData[frame_number].da); - T.at(1,1) = cos(transformationData[frame_number].da); - - T.at(0,2) = transformationData[frame_number].dx; - T.at(1,2) = transformationData[frame_number].dy; - - // Apply rotation matrix to image - cv::Mat frame_stabilized; - cv::warpAffine(cur, frame_stabilized, T, cur.size()); - - // Scale up the image to remove black borders - cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04); - cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size()); + // Grab OpenCV Mat image + cv::Mat frame_image = frame->GetImageCV(); + + // If frame is NULL, return itself + if(!frame_image.empty()){ + + // Check if track data exists for the requested frame + if(transformationData.find(frame_number) != transformationData.end()){ + + // Create empty rotation matrix + cv::Mat T(2,3,CV_64F); + + // Set rotation matrix values + T.at(0,0) = cos(transformationData[frame_number].da); + T.at(0,1) = -sin(transformationData[frame_number].da); + T.at(1,0) = sin(transformationData[frame_number].da); + T.at(1,1) = cos(transformationData[frame_number].da); + + T.at(0,2) = transformationData[frame_number].dx; + T.at(1,2) = transformationData[frame_number].dy; + + // Apply rotation matrix to 
image + cv::Mat frame_stabilized; + cv::warpAffine(frame_image, frame_stabilized, T, frame_image.size()); + + // Scale up the image to remove black borders + cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04); + cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size()); + frame_image = frame_stabilized; + } + } // Set stabilized image to frame - frame->SetImageCV(frame_stabilized); - + // If the input image is NULL or doesn't have tracking data, it's returned as it came + frame->SetImageCV(frame_image); return frame; } // Load protobuf data file bool Stabilizer::LoadStabilizedData(std::string inputFilePath){ + // Create stabilization message libopenshotstabilize::Stabilization stabilizationMessage; @@ -113,6 +126,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){ // Iterate over all frames of the saved message and assign to the data maps for (size_t i = 0; i < stabilizationMessage.frame_size(); i++) { + // Create stabilization message const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i); @@ -125,7 +139,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){ float a = pbFrameData.a(); // Assign data to trajectory map - trajectoryData[i] = CamTrajectory(x,y,a); + trajectoryData[i] = EffectCamTrajectory(x,y,a); // Load transformation data float dx = pbFrameData.dx(); @@ -133,7 +147,8 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){ float da = pbFrameData.da(); // Assing data to transformation map - transformationData[i] = TransformParam(dx,dy,da); + transformationData[i] = EffectTransformParam(dx,dy,da); + std::cout< Tracker::GetFrame(std::shared_ptr frame, int64_t f // Get the frame's image cv::Mat frame_image = frame->GetImageCV(); - // Draw box on image - FrameData fd = trackedDataById[frame_number]; - cv::Rect2d box(fd.x1, fd.y1, fd.x2-fd.x1, fd.y2-fd.y1); - cv::rectangle(frame_image, box, cv::Scalar( 
255, 0, 0 ), 2, 1 ); + // Check if frame isn't NULL + if(!frame_image.empty()){ + + // Check if track data exists for the requested frame + if (trackedDataById.find(frame_number) != trackedDataById.end()) { + + // Draw box on image + EffectFrameData fd = trackedDataById[frame_number]; + cv::Rect2d box(fd.x1, fd.y1, fd.x2-fd.x1, fd.y2-fd.y1); + cv::rectangle(frame_image, box, cv::Scalar( 255, 0, 0 ), 2, 1 ); + } + } // Set image with drawn box to frame + // If the input image is NULL or doesn't have tracking data, it's returned as it came frame->SetImageCV(frame_image); return frame; @@ -115,7 +124,7 @@ bool Tracker::LoadTrackedData(std::string inputFilePath){ int y2 = box.y2(); // Assign data to tracker map - trackedDataById[id] = FrameData(id, rotation, x1, y1, x2, y2); + trackedDataById[id] = EffectFrameData(id, rotation, x1, y1, x2, y2); } // Show the time stamp from the last update in tracker data file @@ -130,11 +139,11 @@ bool Tracker::LoadTrackedData(std::string inputFilePath){ } // Get tracker info for the desired frame -FrameData Tracker::GetTrackedData(int frameId){ +EffectFrameData Tracker::GetTrackedData(int frameId){ // Check if the tracker info for the requested frame exists if ( trackedDataById.find(frameId) == trackedDataById.end() ) { - return FrameData(); + return EffectFrameData(); } else { return trackedDataById[frameId]; }