improved the stabilize effect integration
BrennoCaldato committed Jul 15, 2020
1 parent 6d54765 commit d6e0a69
Showing 13 changed files with 229 additions and 168 deletions.
13 changes: 2 additions & 11 deletions include/Clip.h
@@ -38,8 +38,7 @@
#include <opencv2/core.hpp>
#undef uint64
#undef int64

// #include "CVStabilization.h"

#endif

#include <memory>
@@ -161,14 +160,6 @@ namespace openshot {
openshot::FrameDisplayType display; ///< The format to display the frame number (if any)
openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips

// #ifdef USE_OPENCV
// /// Smoothed transformation for all the clip frames
// std::vector <TransformParam> new_prev_to_cur_transform;
// /// apply the smoothed transformation warp when retrieving a frame
// bool hasStabilization = false;
// void apply_stabilization(std::shared_ptr<openshot::Frame> f, int64_t frame_number);
// #endif

/// Default Constructor
Clip();

@@ -232,7 +223,7 @@ namespace openshot {
void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip

/// Stabilize the clip using OpenCV and optical flow
void stabilize_video();
//void stabilize_video();

// Scale, Location, and Alpha curves
openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)
19 changes: 12 additions & 7 deletions include/ClipProcessingJobs.h
@@ -42,23 +42,28 @@
#endif

#include "Clip.h"
#include "effects/Tracker.h"
#include "effects/Stabilizer.h"

using namespace openshot;

// Constructor responsible for choosing the processing type and applying it to the clip
class ClipProcessingJobs{

private:
int processingProgress;
bool processingDone = false;
bool stopProcessing = false;

public:
ClipProcessingJobs(std::string processingType, Clip& videoClip);

int GetProgress();

void CancelProcessing();

// Apply object tracking to clip
void trackVideo(Clip& videoClip);
std::string trackVideo(Clip& videoClip);
// Apply stabilization to clip
void stabilizeVideo(Clip& video);

public:
ClipProcessingJobs(std::string processingType, Clip& videoClip);
std::string stabilizeVideo(Clip& videoClip);



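As context for this change (not part of the commit): a minimal sketch of how the reworked ClipProcessingJobs interface might be driven by a caller, based only on the signatures above. It assumes the constructor launches the requested job and that GetProgress() reports a percentage; the clip path and polling loop are purely illustrative.

    // Illustrative sketch -- assumes the constructor starts the chosen job.
    Clip myClip("input.mp4");                       // hypothetical source clip
    ClipProcessingJobs job("Stabilizer", myClip);   // or "Tracker"

    // Poll until finished; CancelProcessing() can abort early.
    while (job.GetProgress() < 100) {
        // e.g. update a progress bar or check for user cancellation here
    }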
5 changes: 1 addition & 4 deletions include/EffectInfo.h
@@ -49,10 +49,7 @@ namespace openshot
public:
// Create an instance of an effect (factory style)
EffectBase* CreateEffect(std::string effect_type);

// Create an instance of a video effect (requires processing the whole clip)
EffectBase* CreateEffect(std::string effect_type, Clip* video_clip);

EffectBase* CreateEffect(std::string effect_type, std::string pb_data_path);

/// JSON methods
static std::string Json(); ///< Generate JSON string of this object
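The replacement CreateEffect overload takes the path of the pre-processed protobuf data instead of a Clip pointer. A hedged sketch of a direct call (the .data path is a placeholder; attaching the result mirrors what Clip::SetJsonValue does further down in this commit):

    // Illustrative sketch -- the path is a placeholder for the protobuf file
    // produced by a processing job.
    EffectBase* stab = EffectInfo().CreateEffect("Stabilizer", "/tmp/stabilization.data");
    if (stab)
        myClip.AddEffect(stab);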
2 changes: 2 additions & 0 deletions include/Effects.h
@@ -43,8 +43,10 @@
#include "effects/Mask.h"
#include "effects/Negate.h"
#include "effects/Pixelate.h"
#include "effects/Stabilizer.h"
#include "effects/Saturation.h"
#include "effects/Shift.h"
#include "effects/Tracker.h"
#include "effects/Wave.h"


42 changes: 37 additions & 5 deletions include/effects/Stabilizer.h
@@ -33,16 +33,47 @@

#include "../EffectBase.h"

#include <google/protobuf/util/time_util.h>

#include <cmath>
#include <stdio.h>
#include <memory>
#include "../Color.h"
#include "../Json.h"
#include "../KeyFrame.h"
#include "../CVStabilization.h"
#include "../Clip.h"
#include "../stabilizedata.pb.h"

using namespace std;
using google::protobuf::util::TimeUtil;

struct EffectTransformParam
{
EffectTransformParam() {}
EffectTransformParam(double _dx, double _dy, double _da) {
dx = _dx;
dy = _dy;
da = _da;
}

double dx;
double dy;
double da; // angle
};

struct EffectCamTrajectory
{
EffectCamTrajectory() {}
EffectCamTrajectory(double _x, double _y, double _a) {
x = _x;
y = _y;
a = _a;
}

double x;
double y;
double a; // angle
};


namespace openshot
{
@@ -58,11 +89,12 @@ namespace openshot
private:
/// Init effect settings
void init_effect_details();

std::string protobuf_data_path;

public:
std::map <size_t,CamTrajectory> trajectoryData; // Save camera trajectory data
std::map <size_t,TransformParam> transformationData; // Save transformation data
std::string teste;
std::map <size_t,EffectCamTrajectory> trajectoryData; // Save camera trajectory data
std::map <size_t,EffectTransformParam> transformationData; // Save transformation data

/// Blank constructor, useful when using Json to load the effect properties
Stabilizer(std::string clipTrackerDataPath);
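The EffectTransformParam and EffectCamTrajectory structs mirror the per-frame stabilization data that the removed Clip::apply_stabilization code (further down in this commit) consumed. As a sketch only, assuming the effect keeps the same warp-and-rescale approach, one stored transform could be applied to a frame as shown below; frame_image and frame_number are placeholder names, and the 1.04 zoom factor is taken from the removed code:

    // Illustrative sketch -- rebuild the affine matrix from one frame's dx/dy/da
    // (written as it might appear inside the effect's frame-processing code).
    EffectTransformParam t = transformationData[frame_number];

    cv::Mat T(2, 3, CV_64F);
    T.at<double>(0,0) =  cos(t.da);  T.at<double>(0,1) = -sin(t.da);  T.at<double>(0,2) = t.dx;
    T.at<double>(1,0) =  sin(t.da);  T.at<double>(1,1) =  cos(t.da);  T.at<double>(1,2) = t.dy;

    cv::Mat stabilized;
    cv::warpAffine(frame_image, stabilized, T, frame_image.size());

    // Slight zoom to hide the black borders introduced by the warp.
    cv::Mat S = cv::getRotationMatrix2D(
        cv::Point2f(stabilized.cols / 2, stabilized.rows / 2), 0, 1.04);
    cv::warpAffine(stabilized, stabilized, S, stabilized.size());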
40 changes: 36 additions & 4 deletions include/effects/Tracker.h
@@ -33,16 +33,48 @@

#include "../EffectBase.h"

#include <google/protobuf/util/time_util.h>

#include <cmath>
#include <fstream>
#include <stdio.h>
#include <memory>
#include "../Color.h"
#include "../Json.h"
#include "../KeyFrame.h"
#include "../CVTracker.h"
#include "../Clip.h"
#include "../trackerdata.pb.h"

using namespace std;
using google::protobuf::util::TimeUtil;


// Tracking info struct
struct EffectFrameData{
int frame_id = -1;
float rotation = 0;
int x1 = -1;
int y1 = -1;
int x2 = -1;
int y2 = -1;

// Constructors
EffectFrameData()
{}

EffectFrameData( int _frame_id)
{frame_id = _frame_id;}

EffectFrameData( int _frame_id , float _rotation, int _x1, int _y1, int _x2, int _y2)
{
frame_id = _frame_id;
rotation = _rotation;
x1 = _x1;
y1 = _y1;
x2 = _x2;
y2 = _y2;
}
};


namespace openshot
{
Expand All @@ -61,7 +93,7 @@ namespace openshot

public:

std::map<int, FrameData> trackedDataById; // Save object tracking box data
std::map<int, EffectFrameData> trackedDataById; // Save object tracking box data

/// Blank constructor, useful when using Json to load the effect properties
Tracker(std::string clipTrackerDataPath);
@@ -84,7 +116,7 @@
bool LoadTrackedData(std::string inputFilePath);

// Get tracker info for the desired frame
FrameData GetTrackedData(int frameId);
EffectFrameData GetTrackedData(int frameId);

/// Get and Set JSON methods
std::string Json() const override; ///< Generate JSON string of this object
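EffectFrameData holds one tracked bounding box per frame and GetTrackedData(int frameId) returns it by value, with frame_id == -1 acting as the "no data" default. A hedged usage sketch (the tracker, image and frame_number names are placeholders, and drawing the box is illustrative, not part of this commit):

    // Illustrative sketch -- fetch the tracked box for one frame and draw it.
    EffectFrameData fd = tracker.GetTrackedData(frame_number);
    if (fd.frame_id != -1) {
        cv::rectangle(image,
                      cv::Point(fd.x1, fd.y1),
                      cv::Point(fd.x2, fd.y2),
                      cv::Scalar(0, 255, 0), 2);
    }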
103 changes: 21 additions & 82 deletions src/Clip.cpp
@@ -360,12 +360,6 @@ std::shared_ptr<Frame> Clip::GetFrame(int64_t requested_frame)
// Apply effects to the frame (if any)
apply_effects(frame);

// #ifdef USE_OPENCV
// if(hasStabilization){
// apply_stabilization(frame, requested_frame);
// }
// #endif

// Return processed 'frame'
return frame;
}
@@ -374,33 +368,6 @@ std::shared_ptr<Frame> Clip::GetFrame(int64_t requested_frame)
throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.");
}

// #ifdef USE_OPENCV
// void Clip::apply_stabilization(std::shared_ptr<openshot::Frame> f, int64_t frame_number){
// cv::Mat T(2,3,CV_64F);

// // Grab Mat image
// cv::Mat cur = f->GetImageCV();

// T.at<double>(0,0) = cos(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(0,1) = -sin(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(1,0) = sin(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(1,1) = cos(new_prev_to_cur_transform[frame_number].da);

// T.at<double>(0,2) = new_prev_to_cur_transform[frame_number].dx;
// T.at<double>(1,2) = new_prev_to_cur_transform[frame_number].dy;

// cv::Mat frame_stabilized;

// cv::warpAffine(cur, frame_stabilized, T, cur.size());

// // Scale up the image to remove black borders
// cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04);
// cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());

// f->SetImageCV(frame_stabilized);
// }
// #endif

// Get file extension
std::string Clip::get_file_extension(std::string path)
{
@@ -945,16 +912,30 @@ void Clip::SetJsonValue(const Json::Value root) {
for (const auto existing_effect : root["effects"]) {
// Create Effect
EffectBase *e = NULL;

if (!existing_effect["type"].isNull()) {
// Create instance of effect
if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

// Load Json into Effect
e->SetJsonValue(existing_effect);
std::vector<std::string> pEffects{"Stabilizer", "Tracker"};
std::string effectName = existing_effect["type"].asString();

if(std::find(pEffects.begin(), pEffects.end(), effectName) == pEffects.end()){
// Create instance of effect
if ( (e = EffectInfo().CreateEffect(effectName))) {

// Load Json into Effect
e->SetJsonValue(existing_effect);

// Add Effect to Timeline
AddEffect(e);
}
}
else{
if ( (e = EffectInfo().CreateEffect(effectName, existing_effect["protobuf_data_path"].asString()))) {
// Load Json into Effect
e->SetJsonValue(existing_effect);

// Add Effect to Timeline
AddEffect(e);
// Add Effect to Timeline
AddEffect(e);
}
}
}
}
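The new branch above reads only two fields from each effect entry: "type" and "protobuf_data_path". A hedged example of the kind of clip JSON it expects, fed through the usual SetJson string wrapper (assumed here); the clip name and data path are placeholders:

    // Illustrative sketch -- minimal effects entry for the protobuf-backed path.
    myClip.SetJson(R"({
        "effects": [
            { "type": "Stabilizer", "protobuf_data_path": "/tmp/stabilization.data" }
        ]
    })");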
@@ -1075,45 +1056,3 @@ std::shared_ptr<Frame> Clip::apply_effects(std::shared_ptr<Frame> frame)
// Return modified frame
return frame;
}

// #ifdef USE_OPENCV
// void Clip::stabilize_video(){
// // create CVStabilization object
// CVStabilization stabilizer;

// // Make sure Clip is opened
// Open();
// // Get total number of frames
// int videoLenght = Reader()->info.video_length;

// // Get first Opencv image
// // std::shared_ptr<openshot::Frame> f = GetFrame(0);
// // cv::Mat prev = f->GetImageCV();
// // // OpticalFlow works with grayscale images
// // cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY);

// // Extract and track opticalflow features for each frame
// for (long int frame_number = 0; frame_number <= videoLenght; frame_number++)
// {
// std::shared_ptr<openshot::Frame> f = GetFrame(frame_number);

// // Grab Mat image
// cv::Mat cvimage = f->GetImageCV();
// cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
// stabilizer.TrackFrameFeatures(cvimage, frame_number);
// }

// vector <CamTrajectory> trajectory = stabilizer.ComputeFramesTrajectory();

// vector <CamTrajectory> smoothed_trajectory = stabilizer.SmoothTrajectory(trajectory);

// // Get the smoothed trajectory
// new_prev_to_cur_transform = stabilizer.GenNewCamPosition(smoothed_trajectory);
// // Will apply the smoothed transformation warp when retrieving a frame
// hasStabilization = true;
// }
// #else
// void Clip::stabilize_video(){
// throw "Please compile libopenshot with OpenCV to use this feature";
// }
// #endif
