diff --git a/include/CVStabilization.h b/include/CVStabilization.h
index 59cca78af..91638ed47 100644
--- a/include/CVStabilization.h
+++ b/include/CVStabilization.h
@@ -82,6 +82,9 @@ class CVStabilization {
     int smoothingWindow; // In frames. The larger the more stable the video, but less reactive to sudden panning
 
+    size_t start;
+    size_t end;
+
     cv::Mat last_T;
     cv::Mat cur, cur_grey;
     cv::Mat prev, prev_grey;
@@ -111,7 +114,7 @@ class CVStabilization {
     CVStabilization(std::string processInfoJson, ProcessingController &processingController);
 
     // Process clip and store necessary stabilization data
-    void stabilizeClip(openshot::Clip& video, size_t start=0, size_t end=0, bool process_interval=false);
+    void stabilizeClip(openshot::Clip& video, size_t _start=0, size_t _end=0, bool process_interval=false);
 
     /// Protobuf Save and Load methods
     // Save stabilization data to protobuf file
diff --git a/src/CVStabilization.cpp b/src/CVStabilization.cpp
index a24545874..aa98f629f 100644
--- a/src/CVStabilization.cpp
+++ b/src/CVStabilization.cpp
@@ -37,16 +37,21 @@ CVStabilization::CVStabilization(std::string processInfoJson, ProcessingControll
 }
 
 // Process clip and store necessary stabilization data
-void CVStabilization::stabilizeClip(openshot::Clip& video, size_t start, size_t end, bool process_interval){
+void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){
+
+    start = _start; end = _end;
+
+    video.Open();
 
     size_t frame_number;
     if(!process_interval || end == 0 || end-start <= 0){
         // Get total number of frames in video
-        end = video.Reader()->info.video_length;
+        start = video.Start() * video.Reader()->info.fps.ToInt();
+        end = video.End() * video.Reader()->info.fps.ToInt();
     }
 
     // Extract and track optical flow features for each frame
-    for (frame_number = start; frame_number <= end; frame_number++)
+    for (frame_number = start; frame_number < end; frame_number++)
     {
         // Stop the feature tracker process
         if(processingController->ShouldStop()){
@@ -58,6 +63,7 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t start, size_t
         // Grab OpenCV Mat image
         cv::Mat cvimage = f->GetImageCV();
         cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
+
         TrackFrameFeatures(cvimage, frame_number);
 
         // Update progress
@@ -76,6 +82,7 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t start, size_t
 
 // Track current frame features and find the relative transformation
 void CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
+
     if(prev_grey.empty()){
         prev_grey = frame;
         return;
@@ -91,7 +98,6 @@ void CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
     cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 30);
     // Track features
     cv::calcOpticalFlowPyrLK(prev_grey, frame, prev_corner, cur_corner, status, err);
-
     // Remove untracked features
     for(size_t i=0; i < status.size(); i++) {
         if(status[i]) {
@@ -120,6 +126,7 @@ void CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
 
     // Show processing info
     cout << "Frame: " << frameNum << " - good optical flow: " << prev_corner2.size() << endl;
+
 }
 
 std::vector<CamTrajectory> CVStabilization::ComputeFramesTrajectory(){
@@ -169,7 +176,7 @@ std::map<size_t, CamTrajectory> CVStabilization::SmoothTrajectory(std::vector <CamTrajectory> &trajectory){
[...]
@@ ... @@ std::map<size_t, TransformParam> CVStabilization::GenNewCamPosition(std::map <size_t, CamTrajectory> &trajectory){
[...]
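When no explicit interval is requested (or the requested one is empty), `stabilizeClip()` now derives both loop bounds from the clip's trimmed `Start()`/`End()` times multiplied by the reader's frame rate, instead of always scanning `0..video_length`, and the frame loop became half-open. A minimal sketch of that arithmetic, with hypothetical values standing in for `video.Start()`, `video.End()` and `info.fps`:

```cpp
#include <cstddef>
#include <iostream>

int main() {
    // Hypothetical clip: trimmed to the 2.0s..7.5s range of a 30 fps source
    double clip_start = 2.0, clip_end = 7.5;  // video.Start(), video.End(), in seconds
    int fps = 30;                             // video.Reader()->info.fps.ToInt()

    // Same computation as the default branch of the patched stabilizeClip()
    std::size_t start = static_cast<std::size_t>(clip_start * fps);  // 60
    std::size_t end   = static_cast<std::size_t>(clip_end * fps);    // 225

    // The loop is now half-open: frames [start, end)
    for (std::size_t frame = start; frame < end; frame++) {
        // TrackFrameFeatures(grey_frame, frame); would run here
    }
    std::cout << "Stabilizing frames " << start << ".." << end - 1 << std::endl;
    return 0;
}
```

Worth noting: converting the rate with `ToInt()` shifts the computed bounds slightly for non-integer frame rates such as 29.97, so interval boundaries should be treated as approximate there.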
diff --git a/src/Frame.cpp b/src/Frame.cpp
--- a/src/Frame.cpp
+++ b/src/Frame.cpp
@@ ... @@
 std::shared_ptr<QImage> Frame::Mat2Qimage(cv::Mat img){
     cv::cvtColor(img, img, cv::COLOR_BGR2RGB);
     QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888);
+    std::shared_ptr<QImage> imgIn = std::make_shared<QImage>(qimg.copy());
+
+    // Always convert to RGBA8888 (if different)
     if (imgIn->format() != QImage::Format_RGBA8888)
         *imgIn = imgIn->convertToFormat(QImage::Format_RGBA8888);
diff --git a/src/effects/Stabilizer.cpp b/src/effects/Stabilizer.cpp
index 874b01694..b04ba6e2f 100644
--- a/src/effects/Stabilizer.cpp
+++ b/src/effects/Stabilizer.cpp
@@ -132,7 +132,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
         const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i);
 
         // Load frame number
-        int id = pbFrameData.id();
+        size_t id = pbFrameData.id();
 
         // Load camera trajectory data
         float x = pbFrameData.x();
@@ -148,7 +148,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
         float da = pbFrameData.da();
 
         // Assign data to transformation map
-        transformationData[i] = EffectTransformParam(dx,dy,da);
+        transformationData[id] = EffectTransformParam(dx,dy,da);
     }
 
     // Show the time stamp from the last update in stabilization data file
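Keying `transformationData` by the protobuf frame `id` instead of the loop counter matters as soon as stabilization was computed for an interval that does not start at frame 0: the messages carry their own frame numbers, and indexing by `i` would silently attach every transform to the wrong frame. A small self-contained illustration of the difference (the `PbFrame`/`Param` types below are stand-ins for `libopenshotstabilize::Frame` and `EffectTransformParam`, not the library's actual declarations):

```cpp
#include <cstddef>
#include <iostream>
#include <map>
#include <vector>

struct Param { float dx, dy, da; };
struct PbFrame { std::size_t id; Param p; };  // stand-in for one protobuf message

int main() {
    // Data stabilized for frames 60..62 (e.g. a trimmed clip), not 0..2
    std::vector<PbFrame> message = {{60, {1, 0, 0}}, {61, {0, 1, 0}}, {62, {0, 0, 1}}};

    std::map<std::size_t, Param> byIndex, byId;
    for (std::size_t i = 0; i < message.size(); i++) {
        byIndex[i] = message[i].p;           // old behaviour: keys 0,1,2 (wrong frames)
        byId[message[i].id] = message[i].p;  // new behaviour: keys 60,61,62
    }

    // Playback asks for frame 61: found under the id-based key, missing otherwise
    std::cout << "by id: " << byId.count(61) << ", by index: " << byIndex.count(61) << std::endl;
    return 0;
}
```

The matching `int` to `size_t` change keeps the key type consistent with the `size_t` frame numbers used elsewhere in the patch.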
diff --git a/src/examples/Example_opencv.cpp b/src/examples/Example_opencv.cpp
index 49c267097..5865c4ce0 100644
--- a/src/examples/Example_opencv.cpp
+++ b/src/examples/Example_opencv.cpp
@@ -31,153 +31,160 @@
 #include <iostream>
 #include <sstream>
 #include <memory>
-#include <...>
 #include "../../include/CVTracker.h"
 #include "../../include/CVStabilization.h"
-#include "../../include/trackerdata.pb.h"
 #include "../../include/OpenShot.h"
 #include "../../include/CrashHandler.h"
 
 using namespace openshot;
-// using namespace cv;
-
-// void displayTrackedData(openshot::Clip &r9){
-//     // Opencv display window
-//     cv::namedWindow("Display Image", cv::WINDOW_NORMAL );
-
-//     // Create Tracker
-//     CVTracker kcfTracker;
-//     // Load saved data
-//     if(!kcfTracker.LoadTrackedData("kcf_tracker.data")){
-//         std::cout<<"Was not possible to load the tracked data\n";
-//         return;
-//     }
-
-//     for (long int frame = 1200; frame <= 1600; frame++)
-//     {
-//         int frame_number = frame;
-//         std::shared_ptr<Frame> f = r9.GetFrame(frame_number);
-
-//         // Grab Mat image
-//         cv::Mat cvimage = f->GetImageCV();
-
-//         FrameData fd = kcfTracker.GetTrackedData(frame_number);
-//         cv::Rect2d box(fd.x1, fd.y1, fd.x2-fd.x1, fd.y2-fd.y1);
-//         cv::rectangle(cvimage, box, cv::Scalar( 255, 0, 0 ), 2, 1 );
-
-//         cv::imshow("Display Image", cvimage);
-//         // Press ESC on keyboard to exit
-//         char c=(char)cv::waitKey(25);
-//         if(c==27)
-//             break;
-//     }
-// }
-
+using namespace std;
 
+// Show the pre-processed clip on the screen
 void displayClip(openshot::Clip &r9){
 
+    // Opencv display window
     cv::namedWindow("Display Image", cv::WINDOW_NORMAL );
 
+    // Get video length
     int videoLenght = r9.Reader()->info.video_length;
 
+    // Loop through the clip and show it with the effects, if any
     for (long int frame = 0; frame < videoLenght; frame++)
     {
         int frame_number = frame;
+        // Get the frame
         std::shared_ptr<Frame> f = r9.GetFrame(frame_number);
 
-        // Grab Mat image
+        // Grab OpenCV::Mat image
         cv::Mat cvimage = f->GetImageCV();
 
+        // Convert color scheme from RGB (QImage scheme) to BGR (OpenCV scheme)
+        cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2BGR);
 
+        // Display the frame
         cv::imshow("Display Image", cvimage);
 
-        // Press ESC on keyboard to exit
+
+        // Press ESC on keyboard to exit
         char c=(char)cv::waitKey(25);
         if(c==27)
             break;
     }
+    // Destroy all remaining windows
+    cv::destroyAllWindows();
+}
 
+// Return JSON string for the tracker effect
 string trackerJson(cv::Rect2d r, bool onlyProtoPath){
+
+    // Set the tracker
     string tracker = "KCF";
+
+    // Construct all the composition of the JSON string
     string trackerType = "\"tracker_type\": \"" + tracker + "\"";
     string protobuf_data_path = "\"protobuf_data_path\": \"kcf_tracker.data\"";
     stringstream bboxCoords;
     bboxCoords << "\"bbox\": {\"x\":" << r.x << ",\"y\":" << r.y << ",\"w\":" << r.width << ",\"h\":" << r.height << "}";
[...]
-//         cv::Mat frame_image = f->GetImageCV();
-//         cv::Mat T(2,3,CV_64F);
-
-//         // Get tracked data
-//         TransformParam tp = stabilizer.GetTransformParamData(frame_number);
-//         CamTrajectory ct = stabilizer.GetCamTrajectoryTrackedData(frame_number);
-
-//         // Set rotation matrix values
-//         T.at<double>(0,0) = cos(tp.da);
-//         T.at<double>(0,1) = -sin(tp.da);
-//         T.at<double>(1,0) = sin(tp.da);
-//         T.at<double>(1,1) = cos(tp.da);
-
-//         T.at<double>(0,2) = tp.dx;
-//         T.at<double>(1,2) = tp.dy;
-
-//         // Apply rotation matrix to image
-//         cv::Mat frame_stabilized;
-//         cv::warpAffine(frame_image, frame_stabilized, T, frame_image.size());
-
-//         // Scale up the image to remove black borders
-//         cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04);
-//         cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());
-
-//         std::cout << [...]
[...]
         cv::Mat roi = f->GetImageCV();
+        cv::Rect2d r = cv::selectROI(roi);
+        cv::destroyAllWindows();
+
+        // Create a tracker object by passing a JSON string and a thread controller (the latter isn't used here)
+        // JSON info: path to save the tracked data, type of tracker and bbox coordinates
+        CVTracker tracker(trackerJson(r, false), processingController);
+
+        // Start the tracking
+        tracker.trackClip(r9);
+        // Save the tracked data
+        tracker.SaveTrackedData();
+
+        // Create a tracker effect
+        EffectBase* e = EffectInfo().CreateEffect("Tracker");
+
+        // Pass a JSON string with the saved tracked data
+        // The effect will read and save the tracking in a map: <frame number, tracked data>
+        e->SetJson(trackerJson(r, true));
+        // Add the effect to the clip
+        r9.AddEffect(e);
+    }
+
+    // Apply stabilizer effect on the clip
+    if(SMOOTH_VIDEO){
+
+        // Create a stabilizer object by passing a JSON string and a thread controller (the latter isn't used here)
+        // JSON info: path to save the stabilized data and smoothing window value
+        CVStabilization stabilizer(stabilizerJson(false), processingController);
+
+        // Start the stabilization
+        stabilizer.stabilizeClip(r9);
+        // Save the stabilization data
+        stabilizer.SaveStabilizedData();
+
+        // Create a stabilizer effect
+        EffectBase* e = EffectInfo().CreateEffect("Stabilizer");
+
+        // Pass a JSON string with the saved stabilized data
+        // The effect will read and save the stabilization in a map: <frame number, transformation>
+        e->SetJson(stabilizerJson(true));
+        // Add the effect to the clip
+        r9.AddEffect(e);
+    }
+
+    if(OBJECT_DETECTION_DATA){
+        // CVObjectDetection objectDetection("GPU");
+        // objectDetection.ProcessClip(r9);
+    }
+
+    // Show the pre-processed clip on the screen
+    displayClip(r9);
+    // Close timeline
+    r9.Close();
 
     std::cout << "Completed successfully!" << std::endl;
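The preview logic the example previously carried in comments (removed above) is still the clearest description of what the `Stabilizer` effect conceptually applies per frame. Here is that math as a standalone, compilable sketch; `TransformParam` is a stand-in struct rather than the library's type, and the 1.04 zoom factor is taken directly from the removed code:

```cpp
#include <opencv2/imgproc.hpp>
#include <cmath>

struct TransformParam { double dx, dy, da; };  // stand-in: x/y shift and rotation per frame

// Build the 2x3 rigid transform the removed preview code used, warp the frame with it,
// then zoom about the centre by 4% to hide the black borders the warp introduces.
cv::Mat applyStabilization(const cv::Mat& frame_image, const TransformParam& tp) {
    cv::Mat T(2, 3, CV_64F);
    T.at<double>(0, 0) =  std::cos(tp.da);
    T.at<double>(0, 1) = -std::sin(tp.da);
    T.at<double>(1, 0) =  std::sin(tp.da);
    T.at<double>(1, 1) =  std::cos(tp.da);
    T.at<double>(0, 2) = tp.dx;
    T.at<double>(1, 2) = tp.dy;

    cv::Mat frame_stabilized;
    cv::warpAffine(frame_image, frame_stabilized, T, frame_image.size());

    // Scale up the image to remove black borders
    cv::Mat T_scale = cv::getRotationMatrix2D(
        cv::Point2f(frame_stabilized.cols / 2.0f, frame_stabilized.rows / 2.0f), 0, 1.04);
    cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());
    return frame_stabilized;
}
```

Scaling about the frame centre trades a roughly 4% crop for output without black borders.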