diff --git a/bindings/python/openshot.i b/bindings/python/openshot.i index 764333aa6..2bb21b20b 100644 --- a/bindings/python/openshot.i +++ b/bindings/python/openshot.i @@ -120,6 +120,7 @@ #include "effects/Stabilizer.h" #include "effects/Tracker.h" #include "effects/ObjectDetection.h" + #include "TrackedObjectBBox.h" %} #endif @@ -291,6 +292,7 @@ #ifdef USE_OPENCV %include "ClipProcessingJobs.h" + %include "TrackedObjectBBox.h" #endif #ifdef USE_IMAGEMAGICK diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 3328e111b..bde744d7e 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -63,3 +63,15 @@ if (BLACKMAGIC_FOUND) # Link test executable to the new library target_link_libraries(openshot-blackmagic openshot) endif() + +############### OPENCV EXAMPLE ################ +#if (DEFINED CACHE{HAVE_OPENCV}) +# # Create test executable +# add_executable(openshot-example-opencv +# Example_opencv.cpp) +# +# target_compile_definitions(openshot-example-opencv PRIVATE +# -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) +# # Link test executable to the new library +# target_link_libraries(openshot-example-opencv openshot) +#endif() diff --git a/examples/Example_opencv.cpp b/examples/Example_opencv.cpp index e820708a8..92576b3cb 100644 --- a/examples/Example_opencv.cpp +++ b/examples/Example_opencv.cpp @@ -31,12 +31,15 @@ #include #include #include -#include "../../include/CVTracker.h" -#include "../../include/CVStabilization.h" -#include "../../include/CVObjectDetection.h" +#include "CVTracker.h" +#include "CVStabilization.h" +#include "CVObjectDetection.h" -#include "../../include/OpenShot.h" -#include "../../include/CrashHandler.h" +#include "Clip.h" +#include "EffectBase.h" +#include "EffectInfo.h" +#include "Frame.h" +#include "CrashHandler.h" using namespace openshot; using namespace std; @@ -60,7 +63,7 @@ void displayClip(openshot::Clip &r9){ // Opencv display window cv::namedWindow("Display Image", cv::WINDOW_NORMAL ); - + // Get video lenght 
int videoLenght = r9.Reader()->info.video_length; @@ -118,7 +121,7 @@ int main(int argc, char* argv[]) { CVTracker tracker(trackerJson(r, false), processingController); // Start the tracking - tracker.trackClip(r9, 0, 100, true); + tracker.trackClip(r9, 0, 0, true); // Save the tracked data tracker.SaveTrackedData(); @@ -223,22 +226,23 @@ string jsonFormat(string key, string value, string type){ // Return JSON string for the tracker effect string trackerJson(cv::Rect2d r, bool onlyProtoPath){ - // Define path to save tracked data + // Define path to save tracked data string protobufDataPath = "kcf_tracker.data"; // Set the tracker string tracker = "KCF"; // Construct all the composition of the JSON string string protobuf_data_path = jsonFormat("protobuf_data_path", protobufDataPath); - string trackerType = jsonFormat("tracker_type", tracker); + string trackerType = jsonFormat("tracker-type", tracker); string bboxCoords = jsonFormat( - "bbox", - "{" + jsonFormat("x", to_string(r.x), "int") + - "," + jsonFormat("y", to_string(r.y), "int") + - "," + jsonFormat("w", to_string(r.width), "int") + - "," + jsonFormat("h", to_string(r.height), "int") + + "region", + "{" + jsonFormat("x", to_string(r.x), "int") + + "," + jsonFormat("y", to_string(r.y), "int") + + "," + jsonFormat("width", to_string(r.width), "int") + + "," + jsonFormat("height", to_string(r.height), "int") + + "," + jsonFormat("first-frame", to_string(0), "int") + "}", - "rstring"); + "rstring"); // Return only the the protobuf path in JSON format if(onlyProtoPath) @@ -251,7 +255,7 @@ string trackerJson(cv::Rect2d r, bool onlyProtoPath){ // Return JSON string for the stabilizer effect string stabilizerJson(bool onlyProtoPath){ - // Define path to save stabilized data + // Define path to save stabilized data string protobufDataPath = "example_stabilizer.data"; // Set smoothing window value string smoothingWindow = "30"; @@ -270,13 +274,13 @@ string stabilizerJson(bool onlyProtoPath){ string 
objectDetectionJson(bool onlyProtoPath){ - // Define path to save object detection data + // Define path to save object detection data string protobufDataPath = "example_object_detection.data"; // Define processing device string processingDevice = "GPU"; // Set path to model configuration file string modelConfiguration = "yolov3.cfg"; - // Set path to model weights + // Set path to model weights string modelWeights = "yolov3.weights"; // Set path to class names file string classesFile = "obj.names"; diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 625384cff..c14e84b73 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -86,6 +86,7 @@ set(OPENSHOT_SOURCES FrameMapper.cpp Json.cpp KeyFrame.cpp + TrackedObjectBase.cpp OpenShotVersion.cpp ZmqLogger.cpp PlayerBase.cpp @@ -105,6 +106,7 @@ set(OPENSHOT_CV_SOURCES CVStabilization.cpp ClipProcessingJobs.cpp CVObjectDetection.cpp + TrackedObjectBBox.cpp effects/Stabilizer.cpp effects/Tracker.cpp effects/ObjectDetection.cpp @@ -288,7 +290,7 @@ mark_as_advanced(QT_VERSION_STR) # Find FFmpeg libraries (used for video encoding / decoding) find_package(FFmpeg REQUIRED COMPONENTS avcodec avformat avutil swscale) -set(all_comps avcodec avformat avutil swscale) +set(all_comps avcodec avformat avutil swscale avresample) if(TARGET FFmpeg::swresample) list(APPEND all_comps swresample) else() @@ -507,3 +509,4 @@ endif() set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Jonathan Thomas") #required include(CPack) + diff --git a/src/CVObjectDetection.cpp b/src/CVObjectDetection.cpp index b6f5c2474..959c37bc9 100644 --- a/src/CVObjectDetection.cpp +++ b/src/CVObjectDetection.cpp @@ -2,6 +2,7 @@ * @file * @brief Source file for CVObjectDetection class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -82,10 +83,10 @@ void CVObjectDetection::detectObjectsClip(openshot::Clip &video, size_t _start, setProcessingDevice(); size_t frame_number; - if(!process_interval || end == 0 || end-start == 0){ + 
if(!process_interval || end <= 1 || end-start == 0){ // Get total number of frames in video - start = video.Start() * video.Reader()->info.fps.ToInt(); - end = video.End() * video.Reader()->info.fps.ToInt(); + start = (int)(video.Start() * video.Reader()->info.fps.ToFloat()); + end = (int)(video.End() * video.Reader()->info.fps.ToFloat()); } for (frame_number = start; frame_number <= end; frame_number++) @@ -105,7 +106,6 @@ void CVObjectDetection::detectObjectsClip(openshot::Clip &video, size_t _start, // Update progress processingController->SetProgress(uint(100*(frame_number-start)/(end-start))); - // std::cout<<"Frame: "< classIds; std::vector confidences; std::vector boxes; + std::vector objectIds; for (size_t i = 0; i < outs.size(); ++i) { @@ -180,13 +181,14 @@ void CVObjectDetection::postprocess(const cv::Size &frameDims, const std::vector sort.update(sortBoxes, frameId, sqrt(pow(frameDims.width,2) + pow(frameDims.height, 2)), confidences, classIds); // Clear data vectors - boxes.clear(); confidences.clear(); classIds.clear(); + boxes.clear(); confidences.clear(); classIds.clear(); objectIds.clear(); // Get SORT predicted boxes for(auto TBox : sort.frameTrackingResult){ if(TBox.frame == frameId){ boxes.push_back(TBox.box); confidences.push_back(TBox.confidence); classIds.push_back(TBox.classId); + objectIds.push_back(TBox.id); } } @@ -202,12 +204,14 @@ void CVObjectDetection::postprocess(const cv::Size &frameDims, const std::vector boxes.erase(boxes.begin() + j); classIds.erase(classIds.begin() + j); confidences.erase(confidences.begin() + j); + objectIds.erase(objectIds.begin() + j); break; } else{ boxes.erase(boxes.begin() + i); classIds.erase(classIds.begin() + i); confidences.erase(confidences.begin() + i); + objectIds.erase(objectIds.begin() + i); i = 0; break; } @@ -226,12 +230,14 @@ void CVObjectDetection::postprocess(const cv::Size &frameDims, const std::vector boxes.erase(boxes.begin() + j); classIds.erase(classIds.begin() + j); 
confidences.erase(confidences.begin() + j); + objectIds.erase(objectIds.begin() + j); break; } else{ boxes.erase(boxes.begin() + i); classIds.erase(classIds.begin() + i); confidences.erase(confidences.begin() + i); + objectIds.erase(objectIds.begin() + i); i = 0; break; } @@ -251,7 +257,7 @@ void CVObjectDetection::postprocess(const cv::Size &frameDims, const std::vector normalized_boxes.push_back(normalized_box); } - detectionsData[frameId] = CVDetectionData(classIds, confidences, normalized_boxes, frameId); + detectionsData[frameId] = CVDetectionData(classIds, confidences, normalized_boxes, frameId, objectIds); } // Compute IOU between 2 boxes @@ -359,6 +365,7 @@ void CVObjectDetection::AddFrameDataToProto(pb_objdetect::Frame* pbFrameData, CV box->set_h(dData.boxes.at(i).height); box->set_classid(dData.classIds.at(i)); box->set_confidence(dData.confidences.at(i)); + box->set_objectid(dData.objectIds.at(i)); } } @@ -461,7 +468,10 @@ bool CVObjectDetection::_LoadObjDetectdData(){ const google::protobuf::RepeatedPtrField &pBox = pbFrameData.bounding_box(); // Construct data vectors related to detections in the current frame - std::vector classIds; std::vector confidences; std::vector> boxes; + std::vector classIds; + std::vector confidences; + std::vector> boxes; + std::vector objectIds; for(int i = 0; i < pbFrameData.bounding_box_size(); i++){ // Get bounding box coordinates @@ -472,13 +482,15 @@ bool CVObjectDetection::_LoadObjDetectdData(){ // Get class Id (which will be assign to a class name) and prediction confidence int classId = pBox.Get(i).classid(); float confidence = pBox.Get(i).confidence(); + // Get object Id + int objectId = pBox.Get(i).objectid(); // Push back data into vectors boxes.push_back(box); classIds.push_back(classId); confidences.push_back(confidence); } // Assign data to object detector map - detectionsData[id] = CVDetectionData(classIds, confidences, boxes, id); + detectionsData[id] = CVDetectionData(classIds, confidences, boxes, id, 
objectIds); } // Delete all global objects allocated by libprotobuf. diff --git a/src/CVObjectDetection.h b/src/CVObjectDetection.h index 1747d5e16..326fc7ce0 100644 --- a/src/CVObjectDetection.h +++ b/src/CVObjectDetection.h @@ -2,6 +2,7 @@ * @file * @brief Header file for CVObjectDetection class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -49,16 +50,24 @@ namespace openshot // Stores the detected object bounding boxes and its properties. struct CVDetectionData{ CVDetectionData(){} - CVDetectionData(std::vector _classIds, std::vector _confidences, std::vector> _boxes, size_t _frameId){ + CVDetectionData( + std::vector _classIds, + std::vector _confidences, + std::vector> _boxes, + size_t _frameId, + std::vector _objectIds) + { classIds = _classIds; confidences = _confidences; boxes = _boxes; frameId = _frameId; + objectIds = _objectIds; } size_t frameId; std::vector classIds; std::vector confidences; std::vector> boxes; + std::vector objectIds; }; /** diff --git a/src/CVStabilization.cpp b/src/CVStabilization.cpp index 3600954ff..dd1c6b47c 100644 --- a/src/CVStabilization.cpp +++ b/src/CVStabilization.cpp @@ -2,6 +2,7 @@ * @file * @brief Source file for CVStabilization class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -43,8 +44,8 @@ using google::protobuf::util::TimeUtil; CVStabilization::CVStabilization(std::string processInfoJson, ProcessingController &processingController) : processingController(&processingController){ SetJson(processInfoJson); - start = 0; - end = 0; + start = 1; + end = 1; } // Process clip and store necessary stabilization data @@ -64,10 +65,10 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t cv::Size readerDims(video.Reader()->info.width, video.Reader()->info.height); size_t frame_number; - if(!process_interval || end == 0 || end-start == 0){ + if(!process_interval || end <= 1 || end-start == 0){ // Get total number of frames in video - start 
= video.Start() * video.Reader()->info.fps.ToInt(); - end = video.End() * video.Reader()->info.fps.ToInt(); + start = (int)(video.Start() * video.Reader()->info.fps.ToFloat()) + 1; + end = (int)(video.End() * video.Reader()->info.fps.ToFloat()) + 1; } // Extract and track opticalflow features for each frame diff --git a/src/CVStabilization.h b/src/CVStabilization.h index 96cbf7125..4dd04ca96 100644 --- a/src/CVStabilization.h +++ b/src/CVStabilization.h @@ -2,6 +2,7 @@ * @file * @brief Header file for CVStabilization class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ diff --git a/src/CVTracker.cpp b/src/CVTracker.cpp index 67157918e..b4fb85ea5 100644 --- a/src/CVTracker.cpp +++ b/src/CVTracker.cpp @@ -2,6 +2,7 @@ * @file * @brief Track an object selected by the user * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -44,8 +45,8 @@ using google::protobuf::util::TimeUtil; CVTracker::CVTracker(std::string processInfoJson, ProcessingController &processingController) : processingController(&processingController), json_interval(false){ SetJson(processInfoJson); - start = 0; - end = 0; + start = 1; + end = 1; } // Set desirable tracker method @@ -76,15 +77,15 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo if(!json_interval){ start = _start; end = _end; - if(!process_interval || end <= 0 || end-start == 0){ + if(!process_interval || end <= 1 || end-start == 0){ // Get total number of frames in video - start = video.Start() * video.Reader()->info.fps.ToInt(); - end = video.End() * video.Reader()->info.fps.ToInt(); + start = (int)(video.Start() * video.Reader()->info.fps.ToFloat()) + 1; + end = (int)(video.End() * video.Reader()->info.fps.ToFloat()) + 1; } } else{ - start = start + video.Start() * video.Reader()->info.fps.ToInt(); - end = video.End() * video.Reader()->info.fps.ToInt(); + start = (int)(start + video.Start() * video.Reader()->info.fps.ToFloat()) + 1; + end = 
(int)(video.End() * video.Reader()->info.fps.ToFloat()) + 1; } if(error){ @@ -111,6 +112,12 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo // Grab OpenCV Mat image cv::Mat cvimage = f->GetImageCV(); + if(frame == start){ + // Take the normalized inital bounding box and multiply to the current video shape + bbox = cv::Rect2d(bbox.x*cvimage.cols,bbox.y*cvimage.rows,bbox.width*cvimage.cols, + bbox.height*cvimage.rows); + } + // Pass the first frame to initialize the tracker if(!trackerInit){ @@ -175,30 +182,42 @@ bool CVTracker::trackFrame(cv::Mat &frame, size_t frameId){ float fw = frame.size().width; float fh = frame.size().height; - std::vector bboxes = {bbox}; - std::vector confidence = {1.0}; - std::vector classId = {1}; - - sort.update(bboxes, frameId, sqrt(pow(frame.rows, 2) + pow(frame.cols, 2)), confidence, classId); - - for(auto TBox : sort.frameTrackingResult) - bbox = TBox.box; - + cv::Rect2d filtered_box = filter_box_jitter(frameId); // Add new frame data - trackedDataById[frameId] = FrameData(frameId, 0, (bbox.x)/fw, - (bbox.y)/fh, - (bbox.x+bbox.width)/fw, - (bbox.y+bbox.height)/fh); + trackedDataById[frameId] = FrameData(frameId, 0, (filtered_box.x)/fw, + (filtered_box.y)/fh, + (filtered_box.x+filtered_box.width)/fw, + (filtered_box.y+filtered_box.height)/fh); } else { - // Add new frame data - trackedDataById[frameId] = FrameData(frameId); + // Copy the last frame data if the tracker get lost + trackedDataById[frameId] = trackedDataById[frameId-1]; } return ok; } +cv::Rect2d CVTracker::filter_box_jitter(size_t frameId){ + // get tracked data for the previous frame + float last_box_width = trackedDataById[frameId-1].x2 - trackedDataById[frameId-1].x1; + float last_box_height = trackedDataById[frameId-1].y2 - trackedDataById[frameId-1].y1; + + float curr_box_width = bbox.width; + float curr_box_height = bbox.height; + // keep the last width and height if the difference is less than 1% + float threshold = 0.01; + + 
cv::Rect2d filtered_box = bbox; + if(std::abs(1-(curr_box_width/last_box_width)) <= threshold){ + filtered_box.width = last_box_width; + } + if(std::abs(1-(curr_box_height/last_box_height)) <= threshold){ + filtered_box.height = last_box_height; + } + return filtered_box; +} + bool CVTracker::SaveTrackedData(){ using std::ios; @@ -289,10 +308,10 @@ void CVTracker::SetJsonValue(const Json::Value root) { } if (!root["region"].isNull()){ - double x = root["region"]["x"].asDouble(); - double y = root["region"]["y"].asDouble(); - double w = root["region"]["width"].asDouble(); - double h = root["region"]["height"].asDouble(); + double x = root["region"]["normalized_x"].asDouble(); + double y = root["region"]["normalized_y"].asDouble(); + double w = root["region"]["normalized_width"].asDouble(); + double h = root["region"]["normalized_height"].asDouble(); cv::Rect2d prev_bbox(x,y,w,h); bbox = prev_bbox; } diff --git a/src/CVTracker.h b/src/CVTracker.h index 4430bd4ba..db7b728fa 100644 --- a/src/CVTracker.h +++ b/src/CVTracker.h @@ -2,6 +2,7 @@ * @file * @brief Track an object selected by the user * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -126,6 +127,9 @@ namespace openshot /// If start, end and process_interval are passed as argument, clip will be processed in [start,end) void trackClip(openshot::Clip& video, size_t _start=0, size_t _end=0, bool process_interval=false); + /// Filter current bounding box jitter + cv::Rect2d filter_box_jitter(size_t frameId); + /// Get tracked data for a given frame FrameData GetTrackedData(size_t frameId); diff --git a/src/Clip.cpp b/src/Clip.cpp index 92fff2c36..451b90738 100644 --- a/src/Clip.cpp +++ b/src/Clip.cpp @@ -58,6 +58,7 @@ void Clip::init_settings() mixing = VOLUME_MIX_NONE; waveform = false; previous_properties = ""; + parentObjectId = ""; // Init scale curves scale_x = Keyframe(1.0); @@ -99,6 +100,10 @@ void Clip::init_settings() has_audio = Keyframe(-1.0); has_video = Keyframe(-1.0); + // 
Initialize the attached object and attached clip as null pointers + parentTrackedObject = nullptr; + parentClipObject = NULL; + // Init reader info struct init_reader_settings(); } @@ -240,6 +245,40 @@ Clip::~Clip() } } +// Attach clip to bounding box +void Clip::AttachToObject(std::string object_id) +{ + // Search for the tracked object on the timeline + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + + if (parentTimeline) { + // Create a smart pointer to the tracked object from the timeline + std::shared_ptr trackedObject = parentTimeline->GetTrackedObject(object_id); + Clip* clipObject = parentTimeline->GetClip(object_id); + + // Check for valid tracked object + if (trackedObject){ + SetAttachedObject(trackedObject); + } + else if (clipObject) { + SetAttachedClip(clipObject); + } + } + return; +} + +// Set the pointer to the trackedObject this clip is attached to +void Clip::SetAttachedObject(std::shared_ptr trackedObject){ + parentTrackedObject = trackedObject; + return; +} + +// Set the pointer to the clip this clip is attached to +void Clip::SetAttachedClip(Clip* clipObject){ + parentClipObject = clipObject; + return; +} + /// Set the current reader void Clip::Reader(ReaderBase* new_reader) { @@ -736,7 +775,11 @@ std::string Clip::PropertiesJSON(int64_t requested_frame) const { root["display"] = add_property_json("Frame Number", display, "int", "", NULL, 0, 3, false, requested_frame); root["mixing"] = add_property_json("Volume Mixing", mixing, "int", "", NULL, 0, 2, false, requested_frame); root["waveform"] = add_property_json("Waveform", waveform, "int", "", NULL, 0, 1, false, requested_frame); - + if (!parentObjectId.empty()) { + root["parentObjectId"] = add_property_json("Parent", 0.0, "string", parentObjectId, NULL, -1, -1, false, requested_frame); + } else { + root["parentObjectId"] = add_property_json("Parent", 0.0, "string", "", NULL, -1, -1, false, requested_frame); + } // Add gravity choices (dropdown style) 
root["gravity"]["choices"].append(add_property_choice_json("Top Left", GRAVITY_TOP_LEFT, gravity)); root["gravity"]["choices"].append(add_property_choice_json("Top Center", GRAVITY_TOP, gravity)); @@ -752,32 +795,93 @@ std::string Clip::PropertiesJSON(int64_t requested_frame) const { root["scale"]["choices"].append(add_property_choice_json("Crop", SCALE_CROP, scale)); root["scale"]["choices"].append(add_property_choice_json("Best Fit", SCALE_FIT, scale)); root["scale"]["choices"].append(add_property_choice_json("Stretch", SCALE_STRETCH, scale)); - root["scale"]["choices"].append(add_property_choice_json("None", SCALE_NONE, scale)); + root["scale"]["choices"].append(add_property_choice_json("", SCALE_NONE, scale)); // Add frame number display choices (dropdown style) - root["display"]["choices"].append(add_property_choice_json("None", FRAME_DISPLAY_NONE, display)); + root["display"]["choices"].append(add_property_choice_json("", FRAME_DISPLAY_NONE, display)); root["display"]["choices"].append(add_property_choice_json("Clip", FRAME_DISPLAY_CLIP, display)); root["display"]["choices"].append(add_property_choice_json("Timeline", FRAME_DISPLAY_TIMELINE, display)); root["display"]["choices"].append(add_property_choice_json("Both", FRAME_DISPLAY_BOTH, display)); // Add volume mixing choices (dropdown style) - root["mixing"]["choices"].append(add_property_choice_json("None", VOLUME_MIX_NONE, mixing)); + root["mixing"]["choices"].append(add_property_choice_json("", VOLUME_MIX_NONE, mixing)); root["mixing"]["choices"].append(add_property_choice_json("Average", VOLUME_MIX_AVERAGE, mixing)); root["mixing"]["choices"].append(add_property_choice_json("Reduce", VOLUME_MIX_REDUCE, mixing)); // Add waveform choices (dropdown style) root["waveform"]["choices"].append(add_property_choice_json("Yes", true, waveform)); root["waveform"]["choices"].append(add_property_choice_json("No", false, waveform)); + + // Add the parentTrackedObject's properties + if (parentTrackedObject) + { + // 
Convert Clip's frame position to Timeline's frame position + long clip_start_position = round(Position() * info.fps.ToDouble()) + 1; + long clip_start_frame = (Start() * info.fps.ToDouble()) + 1; + double timeline_frame_number = requested_frame + clip_start_position - clip_start_frame; + + // Get attached object's parent clip properties + std::map< std::string, float > trackedObjectParentClipProperties = parentTrackedObject->GetParentClipProperties(timeline_frame_number); + double parentObject_frame_number = trackedObjectParentClipProperties["frame_number"]; + // Get attached object properties + std::map< std::string, float > trackedObjectProperties = parentTrackedObject->GetBoxValues(parentObject_frame_number); + + // Correct the parent Tracked Object properties by the clip's reference system + float parentObject_location_x = trackedObjectProperties["cx"] - 0.5 + trackedObjectParentClipProperties["cx"]; + float parentObject_location_y = trackedObjectProperties["cy"] - 0.5 + trackedObjectParentClipProperties["cy"]; + float parentObject_scale_x = trackedObjectProperties["w"]*trackedObjectProperties["sx"]; + float parentObject_scale_y = trackedObjectProperties["h"]*trackedObjectProperties["sy"]; + float parentObject_rotation = trackedObjectProperties["r"] + trackedObjectParentClipProperties["r"]; + + // Add the parent Tracked Object properties to JSON + root["location_x"] = add_property_json("Location X", parentObject_location_x, "float", "", &location_x, -1.0, 1.0, false, requested_frame); + root["location_y"] = add_property_json("Location Y", parentObject_location_y, "float", "", &location_y, -1.0, 1.0, false, requested_frame); + root["scale_x"] = add_property_json("Scale X", parentObject_scale_x, "float", "", &scale_x, 0.0, 1.0, false, requested_frame); + root["scale_y"] = add_property_json("Scale Y", parentObject_scale_y, "float", "", &scale_y, 0.0, 1.0, false, requested_frame); + root["rotation"] = add_property_json("Rotation", parentObject_rotation, "float", 
"", &rotation, -360, 360, false, requested_frame); + root["shear_x"] = add_property_json("Shear X", shear_x.GetValue(requested_frame), "float", "", &shear_x, -1.0, 1.0, false, requested_frame); + root["shear_y"] = add_property_json("Shear Y", shear_y.GetValue(requested_frame), "float", "", &shear_y, -1.0, 1.0, false, requested_frame); + } + // Add the parentClipObject's properties + else if (parentClipObject) + { + // Convert Clip's frame position to Timeline's frame position + long clip_start_position = round(Position() * info.fps.ToDouble()) + 1; + long clip_start_frame = (Start() * info.fps.ToDouble()) + 1; + double timeline_frame_number = requested_frame + clip_start_position - clip_start_frame; + + // Correct the parent Clip Object properties by the clip's reference system + float parentObject_location_x = parentClipObject->location_x.GetValue(timeline_frame_number); + float parentObject_location_y = parentClipObject->location_y.GetValue(timeline_frame_number); + float parentObject_scale_x = parentClipObject->scale_x.GetValue(timeline_frame_number); + float parentObject_scale_y = parentClipObject->scale_y.GetValue(timeline_frame_number); + float parentObject_shear_x = parentClipObject->shear_x.GetValue(timeline_frame_number); + float parentObject_shear_y = parentClipObject->shear_y.GetValue(timeline_frame_number); + float parentObject_rotation = parentClipObject->rotation.GetValue(timeline_frame_number); + + // Add the parent Clip Object properties to JSON + root["location_x"] = add_property_json("Location X", parentObject_location_x, "float", "", &location_x, -1.0, 1.0, false, requested_frame); + root["location_y"] = add_property_json("Location Y", parentObject_location_y, "float", "", &location_y, -1.0, 1.0, false, requested_frame); + root["scale_x"] = add_property_json("Scale X", parentObject_scale_x, "float", "", &scale_x, 0.0, 1.0, false, requested_frame); + root["scale_y"] = add_property_json("Scale Y", parentObject_scale_y, "float", "", &scale_y, 0.0, 
1.0, false, requested_frame); + root["rotation"] = add_property_json("Rotation", parentObject_rotation, "float", "", &rotation, -360, 360, false, requested_frame); + root["shear_x"] = add_property_json("Shear X", parentObject_shear_x, "float", "", &shear_x, -1.0, 1.0, false, requested_frame); + root["shear_y"] = add_property_json("Shear Y", parentObject_shear_y, "float", "", &shear_y, -1.0, 1.0, false, requested_frame); + } + else + { + // Add this own clip's properties to JSON + root["location_x"] = add_property_json("Location X", location_x.GetValue(requested_frame), "float", "", &location_x, -1.0, 1.0, false, requested_frame); + root["location_y"] = add_property_json("Location Y", location_y.GetValue(requested_frame), "float", "", &location_y, -1.0, 1.0, false, requested_frame); + root["scale_x"] = add_property_json("Scale X", scale_x.GetValue(requested_frame), "float", "", &scale_x, 0.0, 1.0, false, requested_frame); + root["scale_y"] = add_property_json("Scale Y", scale_y.GetValue(requested_frame), "float", "", &scale_y, 0.0, 1.0, false, requested_frame); + root["rotation"] = add_property_json("Rotation", rotation.GetValue(requested_frame), "float", "", &rotation, -360, 360, false, requested_frame); + root["shear_x"] = add_property_json("Shear X", shear_x.GetValue(requested_frame), "float", "", &shear_x, -1.0, 1.0, false, requested_frame); + root["shear_y"] = add_property_json("Shear Y", shear_y.GetValue(requested_frame), "float", "", &shear_y, -1.0, 1.0, false, requested_frame); + } // Keyframes - root["location_x"] = add_property_json("Location X", location_x.GetValue(requested_frame), "float", "", &location_x, -1.0, 1.0, false, requested_frame); - root["location_y"] = add_property_json("Location Y", location_y.GetValue(requested_frame), "float", "", &location_y, -1.0, 1.0, false, requested_frame); - root["scale_x"] = add_property_json("Scale X", scale_x.GetValue(requested_frame), "float", "", &scale_x, 0.0, 1.0, false, requested_frame); - root["scale_y"] = 
add_property_json("Scale Y", scale_y.GetValue(requested_frame), "float", "", &scale_y, 0.0, 1.0, false, requested_frame); root["alpha"] = add_property_json("Alpha", alpha.GetValue(requested_frame), "float", "", &alpha, 0.0, 1.0, false, requested_frame); - root["shear_x"] = add_property_json("Shear X", shear_x.GetValue(requested_frame), "float", "", &shear_x, -1.0, 1.0, false, requested_frame); - root["shear_y"] = add_property_json("Shear Y", shear_y.GetValue(requested_frame), "float", "", &shear_y, -1.0, 1.0, false, requested_frame); - root["rotation"] = add_property_json("Rotation", rotation.GetValue(requested_frame), "float", "", &rotation, -360, 360, false, requested_frame); root["origin_x"] = add_property_json("Origin X", origin_x.GetValue(requested_frame), "float", "", &origin_x, 0.0, 1.0, false, requested_frame); root["origin_y"] = add_property_json("Origin Y", origin_y.GetValue(requested_frame), "float", "", &origin_y, 0.0, 1.0, false, requested_frame); root["volume"] = add_property_json("Volume", volume.GetValue(requested_frame), "float", "", &volume, 0.0, 1.0, false, requested_frame); @@ -810,6 +914,7 @@ Json::Value Clip::JsonValue() const { // Create root json object Json::Value root = ClipBase::JsonValue(); // get parent properties + root["parentObjectId"] = parentObjectId; root["gravity"] = gravity; root["scale"] = scale; root["anchor"] = anchor; @@ -884,6 +989,15 @@ void Clip::SetJsonValue(const Json::Value root) { ClipBase::SetJsonValue(root); // Set data from Json (if key is found) + if (!root["parentObjectId"].isNull()){ + parentObjectId = root["parentObjectId"].asString(); + if (parentObjectId.size() > 0 && parentObjectId != ""){ + AttachToObject(parentObjectId); + } else{ + parentTrackedObject = nullptr; + parentClipObject = NULL; + } + } if (!root["gravity"].isNull()) gravity = (GravityType) root["gravity"].asInt(); if (!root["scale"].isNull()) @@ -1066,6 +1180,40 @@ void Clip::AddEffect(EffectBase* effect) // Sort effects sort_effects(); + + // 
Get the parent timeline of this clip + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + + if (parentTimeline) + effect->ParentTimeline(parentTimeline); + + #ifdef USE_OPENCV + // Add Tracked Object to Timeline + if (effect->info.has_tracked_object){ + + // Check if this clip has a parent timeline + if (parentTimeline){ + + effect->ParentTimeline(parentTimeline); + + // Iterate through effect's vector of Tracked Objects + for (auto const& trackedObject : effect->trackedObjects){ + + // Cast the Tracked Object as TrackedObjectBBox + std::shared_ptr trackedObjectBBox = std::static_pointer_cast(trackedObject.second); + + // Set the Tracked Object's parent clip to this + trackedObjectBBox->ParentClip(this); + + // Add the Tracked Object to the timeline + parentTimeline->AddTrackedObject(trackedObjectBBox); + } + } + } + #endif + + // Clear cache + cache.Clear(); } // Remove an effect from the clip @@ -1206,6 +1354,12 @@ QTransform Clip::get_transform(std::shared_ptr frame, int width, int heig /* RESIZE SOURCE IMAGE - based on scale type */ QSize source_size = source_image->size(); + + // Apply stretch scale to correctly fit the bounding-box + if (parentTrackedObject){ + scale = SCALE_STRETCH; + } + switch (scale) { case (SCALE_FIT): { @@ -1243,6 +1397,74 @@ QTransform Clip::get_transform(std::shared_ptr frame, int width, int heig } } + // Initialize parent object's properties (Clip or Tracked Object) + float parentObject_location_x = 0.0; + float parentObject_location_y = 0.0; + float parentObject_scale_x = 1.0; + float parentObject_scale_y = 1.0; + float parentObject_shear_x = 0.0; + float parentObject_shear_y = 0.0; + float parentObject_rotation = 0.0; + + // Get the parentClipObject properties + if (parentClipObject){ + + // Convert Clip's frame position to Timeline's frame position + long clip_start_position = round(Position() * info.fps.ToDouble()) + 1; + long clip_start_frame = (Start() * info.fps.ToDouble()) + 1; + double timeline_frame_number = 
frame->number + clip_start_position - clip_start_frame; + + // Get parent object's properties (Clip) + parentObject_location_x = parentClipObject->location_x.GetValue(timeline_frame_number); + parentObject_location_y = parentClipObject->location_y.GetValue(timeline_frame_number); + parentObject_scale_x = parentClipObject->scale_x.GetValue(timeline_frame_number); + parentObject_scale_y = parentClipObject->scale_y.GetValue(timeline_frame_number); + parentObject_shear_x = parentClipObject->shear_x.GetValue(timeline_frame_number); + parentObject_shear_y = parentClipObject->shear_y.GetValue(timeline_frame_number); + parentObject_rotation = parentClipObject->rotation.GetValue(timeline_frame_number); + } + + // Get the parentTrackedObject properties + if (parentTrackedObject){ + + // Convert Clip's frame position to Timeline's frame position + long clip_start_position = round(Position() * info.fps.ToDouble()) + 1; + long clip_start_frame = (Start() * info.fps.ToDouble()) + 1; + double timeline_frame_number = frame->number + clip_start_position - clip_start_frame; + + // Get parentTrackedObject's parent clip's properties + std::map trackedObjectParentClipProperties = parentTrackedObject->GetParentClipProperties(timeline_frame_number); + + // Get the attached object's parent clip's properties + if (!trackedObjectParentClipProperties.empty()) + { + // Get parent object's properties (Tracked Object) + float parentObject_frame_number = trackedObjectParentClipProperties["frame_number"]; + + // Access the parentTrackedObject's properties + std::map trackedObjectProperties = parentTrackedObject->GetBoxValues(parentObject_frame_number); + + // Get the Tracked Object's properties and correct them by the clip's reference system + parentObject_location_x = trackedObjectProperties["cx"] - 0.5 + trackedObjectParentClipProperties["location_x"]; + parentObject_location_y = trackedObjectProperties["cy"] - 0.5 + trackedObjectParentClipProperties["location_y"]; + parentObject_scale_x = 
trackedObjectProperties["w"]*trackedObjectProperties["sx"]; + parentObject_scale_y = trackedObjectProperties["h"]*trackedObjectProperties["sy"]; + parentObject_rotation = trackedObjectProperties["r"] + trackedObjectParentClipProperties["rotation"]; + } + else + { + // Access the parentTrackedObject's properties + std::map trackedObjectProperties = parentTrackedObject->GetBoxValues(timeline_frame_number); + + // Get the Tracked Object's properties and correct them by the clip's reference system + parentObject_location_x = trackedObjectProperties["cx"] - 0.5; + parentObject_location_y = trackedObjectProperties["cy"] - 0.5; + parentObject_scale_x = trackedObjectProperties["w"]*trackedObjectProperties["sx"]; + parentObject_scale_y = trackedObjectProperties["h"]*trackedObjectProperties["sy"]; + parentObject_rotation = trackedObjectProperties["r"]; + } + } + /* GRAVITY LOCATION - Initialize X & Y to the correct values (before applying location curves) */ float x = 0.0; // left float y = 0.0; // top @@ -1250,9 +1472,16 @@ QTransform Clip::get_transform(std::shared_ptr frame, int width, int heig // Adjust size for scale x and scale y float sx = scale_x.GetValue(frame->number); // percentage X scale float sy = scale_y.GetValue(frame->number); // percentage Y scale + + // Change clip's scale to parentObject's scale + if(parentObject_scale_x != 0.0 && parentObject_scale_y != 0.0){ + sx*= parentObject_scale_x; + sy*= parentObject_scale_y; + } + float scaled_source_width = source_size.width() * sx; float scaled_source_height = source_size.height() * sy; - + switch (gravity) { case (GRAVITY_TOP_LEFT): @@ -1291,25 +1520,24 @@ QTransform Clip::get_transform(std::shared_ptr frame, int width, int heig // Debug output ZmqLogger::Instance()->AppendDebugMethod("Clip::get_transform (Gravity)", "frame->number", frame->number, "source_clip->gravity", gravity, "scaled_source_width", scaled_source_width, "scaled_source_height", scaled_source_height); + QTransform transform; + /* LOCATION, 
ROTATION, AND SCALE */ - float r = rotation.GetValue(frame->number); // rotate in degrees - x += (width * location_x.GetValue(frame->number)); // move in percentage of final width - y += (height * location_y.GetValue(frame->number)); // move in percentage of final height - float shear_x_value = shear_x.GetValue(frame->number); - float shear_y_value = shear_y.GetValue(frame->number); + float r = rotation.GetValue(frame->number) + parentObject_rotation; // rotate in degrees + x += (width * (location_x.GetValue(frame->number) + parentObject_location_x )); // move in percentage of final width + y += (height * (location_y.GetValue(frame->number) + parentObject_location_y )); // move in percentage of final height + float shear_x_value = shear_x.GetValue(frame->number) + parentObject_shear_x; + float shear_y_value = shear_y.GetValue(frame->number) + parentObject_shear_y; float origin_x_value = origin_x.GetValue(frame->number); float origin_y_value = origin_y.GetValue(frame->number); - QTransform transform; - // Transform source image (if needed) ZmqLogger::Instance()->AppendDebugMethod("Clip::get_transform (Build QTransform - if needed)", "frame->number", frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); if (!isEqual(x, 0) || !isEqual(y, 0)) { // TRANSLATE/MOVE CLIP transform.translate(x, y); - } - + } if (!isEqual(r, 0) || !isEqual(shear_x_value, 0) || !isEqual(shear_y_value, 0)) { // ROTATE CLIP (around origin_x, origin_y) float origin_x_offset = (scaled_source_width * origin_x_value); @@ -1319,11 +1547,9 @@ QTransform Clip::get_transform(std::shared_ptr frame, int width, int heig transform.shear(shear_x_value, shear_y_value); transform.translate(-origin_x_offset,-origin_y_offset); } - // SCALE CLIP (if needed) float source_width_scale = (float(source_size.width()) / float(source_image->width())) * sx; float source_height_scale = (float(source_size.height()) / float(source_image->height())) * sy; - if (!isEqual(source_width_scale, 1.0) || 
!isEqual(source_height_scale, 1.0)) { transform.scale(source_width_scale, source_height_scale); } diff --git a/src/Clip.h b/src/Clip.h index 226e1a28b..2b9caceda 100644 --- a/src/Clip.h +++ b/src/Clip.h @@ -44,6 +44,7 @@ #include #include #include + #include "AudioResampler.h" #include "ClipBase.h" #include "Color.h" @@ -53,6 +54,7 @@ #include "EffectInfo.h" #include "Frame.h" #include "KeyFrame.h" +#include "TrackedObjectBase.h" #include "ReaderBase.h" #include @@ -122,6 +124,10 @@ namespace openshot { bool waveform; ///< Should a waveform be used instead of the clip's image std::list effects; ///< List of clips on this timeline bool is_open; ///< Is Reader opened + std::string parentObjectId; ///< Id of the bounding box that this clip is attached to + std::shared_ptr parentTrackedObject; ///< Tracked object this clip is attached to + openshot::Clip* parentClipObject; ///< Clip object this clip is attached to + // Audio resampler (if time mapping) openshot::AudioResampler *resampler; @@ -197,6 +203,23 @@ namespace openshot { /// Determine if reader is open or closed bool IsOpen() override { return is_open; }; + /// Get and set the object id that this clip is attached to + std::string GetAttachedId() const { return parentObjectId; }; + /// Set id of the object id that this clip is attached to + void SetAttachedId(std::string value) { parentObjectId = value; }; + + /// Attach clip to Tracked Object or to another Clip + void AttachToObject(std::string object_id); + + /// Set the pointer to the trackedObject this clip is attached to + void SetAttachedObject(std::shared_ptr trackedObject); + /// Set the pointer to the clip this clip is attached to + void SetAttachedClip(Clip* clipObject); + /// Return a pointer to the trackedObject this clip is attached to + std::shared_ptr GetAttachedObject() const { return parentTrackedObject; }; + /// Return a pointer to the clip this clip is attached to + Clip* GetAttachedClip() const { return parentClipObject; }; + /// Return the 
type name of the class std::string Name() override { return "Clip"; }; diff --git a/src/ClipProcessingJobs.cpp b/src/ClipProcessingJobs.cpp index b8d7e00a3..cc3007d6a 100644 --- a/src/ClipProcessingJobs.cpp +++ b/src/ClipProcessingJobs.cpp @@ -1,3 +1,34 @@ +/** + * @file + * @brief Source file for ClipProcessingJobs class + * @author Jonathan Thomas + * @author Brenno Caldato + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + #include "ClipProcessingJobs.h" namespace openshot { diff --git a/src/ClipProcessingJobs.h b/src/ClipProcessingJobs.h index afea1ffa9..66c55c3e9 100644 --- a/src/ClipProcessingJobs.h +++ b/src/ClipProcessingJobs.h @@ -1,7 +1,8 @@ /** * @file - * @brief Header for the ClipProcessingJobs class + * @brief Header for ClipProcessingJobs class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ diff --git a/src/Color.cpp b/src/Color.cpp index fe3055918..e848f1f7f 100644 --- a/src/Color.cpp +++ b/src/Color.cpp @@ -75,6 +75,17 @@ std::string Color::GetColorHex(int64_t frame_number) { return QColor( r,g,b,a ).name().toStdString(); } +// Get the RGBA values of a color at a specific frame +std::vector Color::GetColorRGBA(int64_t frame_number) { + std::vector rgba; + rgba.push_back(red.GetInt(frame_number)); + rgba.push_back(green.GetInt(frame_number)); + rgba.push_back(blue.GetInt(frame_number)); + rgba.push_back(alpha.GetInt(frame_number)); + + return rgba; +} + // Get the distance between 2 RGB pairs (alpha is ignored) long Color::GetDistance(long R1, long G1, long B1, long R2, long G2, long B2) { diff --git a/src/Color.h b/src/Color.h index c3c900715..8b7eaae24 100644 --- a/src/Color.h +++ b/src/Color.h @@ -65,6 +65,9 @@ namespace openshot { /// Get the HEX value of a color at a specific frame std::string GetColorHex(int64_t frame_number); + // Get the RGBA values of a color at a specific frame + std::vector GetColorRGBA(int64_t frame_number); + /// Get the distance between 2 RGB pairs. 
(0=identical colors, 10=very close colors, 760=very different colors) static long GetDistance(long R1, long G1, long B1, long R2, long G2, long B2); diff --git a/src/EffectBase.cpp b/src/EffectBase.cpp index 81ba72998..e22283fe7 100644 --- a/src/EffectBase.cpp +++ b/src/EffectBase.cpp @@ -30,6 +30,7 @@ #include "EffectBase.h" #include "Exceptions.h" +#include "Timeline.h" using namespace openshot; @@ -44,10 +45,14 @@ void EffectBase::InitEffectInfo() Order(0); ParentClip(NULL); + parentEffect = NULL; + info.has_video = false; info.has_audio = false; + info.has_tracked_object = false; info.name = ""; info.description = ""; + info.parent_effect_id = ""; } // Display file information @@ -90,8 +95,10 @@ Json::Value EffectBase::JsonValue() const { root["name"] = info.name; root["class_name"] = info.class_name; root["description"] = info.description; + root["parent_effect_id"] = info.parent_effect_id; root["has_video"] = info.has_video; root["has_audio"] = info.has_audio; + root["has_tracked_object"] = info.has_tracked_object; root["order"] = Order(); // return JsonValue @@ -99,12 +106,12 @@ Json::Value EffectBase::JsonValue() const { } // Load JSON string into this object -void EffectBase::SetJson(const std::string value) { +void EffectBase::SetJson(std::string value) { // Parse JSON string into JSON objects try { - const Json::Value root = openshot::stringToJson(value); + Json::Value root = openshot::stringToJson(value); // Set all values that match SetJsonValue(root); } @@ -116,7 +123,31 @@ void EffectBase::SetJson(const std::string value) { } // Load Json::Value into this object -void EffectBase::SetJsonValue(const Json::Value root) { +void EffectBase::SetJsonValue(Json::Value root) { + + if (ParentTimeline()){ + // Get parent timeline + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + + // Get the list of effects on the timeline + std::list effects = parentTimeline->ClipEffects(); + + // TODO: Fix recursive call for Object Detection + + // // Loop through 
the effects and check if we have a child effect linked to this effect + for (auto const& effect : effects){ + // Set the properties of all effects which parentEffect points to this + if ((effect->info.parent_effect_id == this->Id()) && (effect->Id() != this->Id())) + effect->SetJsonValue(root); + } + } + + // Set this effect properties with the parent effect properties (except the id and parent_effect_id) + if (parentEffect){ + root = parentEffect->JsonValue(); + root["id"] = this->Id(); + root["parent_effect_id"] = this->info.parent_effect_id; + } // Set parent data ClipBase::SetJsonValue(root); @@ -124,6 +155,14 @@ void EffectBase::SetJsonValue(const Json::Value root) { // Set data from Json (if key is found) if (!root["order"].isNull()) Order(root["order"].asInt()); + + if (!root["parent_effect_id"].isNull()){ + info.parent_effect_id = root["parent_effect_id"].asString(); + if (info.parent_effect_id.size() > 0 && info.parent_effect_id != "" && parentEffect == NULL) + SetParentEffect(info.parent_effect_id); + else + parentEffect = NULL; + } } // Generate Json::Value for this object @@ -150,3 +189,36 @@ openshot::ClipBase* EffectBase::ParentClip() { void EffectBase::ParentClip(openshot::ClipBase* new_clip) { clip = new_clip; } + +// Set the parent effect from which this properties will be set to +void EffectBase::SetParentEffect(std::string parentEffect_id) { + + // Get parent Timeline + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + + if (parentTimeline){ + + // Get a pointer to the parentEffect + EffectBase* parentEffectPtr = parentTimeline->GetClipEffect(parentEffect_id); + + if (parentEffectPtr){ + // Set the parent Effect + parentEffect = parentEffectPtr; + + // Set the properties of this effect with the parent effect's properties + Json::Value EffectJSON = parentEffect->JsonValue(); + EffectJSON["id"] = this->Id(); + EffectJSON["parent_effect_id"] = this->info.parent_effect_id; + this->SetJsonValue(EffectJSON); + } + } + return; +} + +// Return 
the ID of this effect's parent clip +std::string EffectBase::ParentClipId() const{ + if(clip) + return clip->Id(); + else + return ""; +} \ No newline at end of file diff --git a/src/EffectBase.h b/src/EffectBase.h index 750987ec9..6e7418506 100644 --- a/src/EffectBase.h +++ b/src/EffectBase.h @@ -37,6 +37,7 @@ #include "ClipBase.h" #include "Json.h" #include "Frame.h" +#include "TrackedObjectBase.h" namespace openshot { @@ -52,8 +53,10 @@ namespace openshot std::string class_name; ///< The class name of the effect std::string name; ///< The name of the effect std::string description; ///< The description of this effect and what it does + std::string parent_effect_id; ///< Id of the parent effect (if there is one) bool has_video; ///< Determines if this effect manipulates the image of a frame bool has_audio; ///< Determines if this effect manipulates the audio of a frame + bool has_tracked_object; ///< Determines if this effect track objects through the clip }; /** @@ -73,6 +76,12 @@ namespace openshot public: + /// Parent effect (which properties will set this effect properties) + EffectBase* parentEffect; + + /// Map of Tracked Object's by their indices (used by Effects that track objects on clips) + std::map > trackedObjects; + /// Information about the current effect EffectInfoStruct info; @@ -92,11 +101,28 @@ namespace openshot /// Set parent clip object of this effect void ParentClip(openshot::ClipBase* new_clip); + /// Set the parent effect from which this properties will be set to + void SetParentEffect(std::string parentEffect_id); + + /// Return the ID of this effect's parent clip + std::string ParentClipId() const; + + /// Get the indexes and IDs of all visible objects in the given frame + virtual std::string GetVisibleObjects(int64_t frame_number) const {return {}; }; + // Get and Set JSON methods virtual std::string Json() const = 0; ///< Generate JSON string of this object - virtual void SetJson(const std::string value) = 0; ///< Load JSON string into 
this object + virtual void SetJson(std::string value) = 0; ///< Load JSON string into this object virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object - virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object + virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::Value into this object + + virtual std::string Json(int64_t requested_frame) const{ + return {}; + }; + virtual void SetJson(int64_t requested_frame, const std::string value) { + return; + }; + Json::Value JsonInfo() const; ///< Generate JSON object of meta data / info /// Get the order that this effect should be executed. diff --git a/src/Fraction.cpp b/src/Fraction.cpp index 86671e117..d48983262 100644 --- a/src/Fraction.cpp +++ b/src/Fraction.cpp @@ -56,7 +56,7 @@ float Fraction::ToFloat() { } // Return this fraction as a double (i.e. 1/2 = 0.5) -double Fraction::ToDouble() { +double Fraction::ToDouble() const { return double(num) / double(den); } @@ -91,7 +91,7 @@ void Fraction::Reduce() { } // Return the reciprocal as a new Fraction -Fraction Fraction::Reciprocal() +Fraction Fraction::Reciprocal() const { // flip the fraction return Fraction(den, num); diff --git a/src/Fraction.h b/src/Fraction.h index ffb0640be..fb36e88b5 100644 --- a/src/Fraction.h +++ b/src/Fraction.h @@ -75,13 +75,13 @@ namespace openshot { float ToFloat(); /// Return this fraction as a double (i.e. 
1/2 = 0.5) - double ToDouble(); + double ToDouble() const; /// Return a rounded integer of the fraction (for example 30000/1001 returns 30) int ToInt(); /// Return the reciprocal as a Fraction - Fraction Reciprocal(); + Fraction Reciprocal() const; }; diff --git a/src/KeyFrame.cpp b/src/KeyFrame.cpp index 8e4cfa69c..4107f2def 100644 --- a/src/KeyFrame.cpp +++ b/src/KeyFrame.cpp @@ -31,21 +31,24 @@ #include "KeyFrame.h" #include "Exceptions.h" -#include // For assert() -#include // For std::cout -#include // For std::setprecision #include #include #include +#include // For assert() +#include // For std::cout +#include // For std::setprecision using namespace std; using namespace openshot; -namespace { +namespace openshot{ + + // Check if the X coordinate of a given Point is lower than a given value bool IsPointBeforeX(Point const & p, double const x) { return p.co.X < x; } + // Linear interpolation between two points double InterpolateLinearCurve(Point const & left, Point const & right, double const target) { double const diff_Y = right.co.Y - left.co.Y; double const diff_X = right.co.X - left.co.X; @@ -53,6 +56,7 @@ namespace { return left.co.Y + slope * (target - left.co.X); } + // Bezier interpolation between two points double InterpolateBezierCurve(Point const & left, Point const & right, double const target, double const allowed_error) { double const X_diff = right.co.X - left.co.X; double const Y_diff = right.co.Y - left.co.Y; @@ -88,11 +92,16 @@ namespace { t_step /= 2; } while (true); } - - + // Interpolate two points using the right Point's interpolation method double InterpolateBetween(Point const & left, Point const & right, double target, double allowed_error) { - assert(left.co.X < target); - assert(target <= right.co.X); + // Check if target is outside of the extremity points + // This can occur when moving the play head quickly + if(left.co.X > target){ + return left.co.Y; + } + if(target > right.co.X){ + return right.co.Y; + } switch 
(right.interpolation) { case CONSTANT: return left.co.Y; case LINEAR: return InterpolateLinearCurve(left, right, target); @@ -100,26 +109,24 @@ namespace { default: return InterpolateLinearCurve(left, right, target); } } +} - - template - int64_t SearchBetweenPoints(Point const & left, Point const & right, int64_t const current, Check check) { - int64_t start = left.co.X; - int64_t stop = right.co.X; - while (start < stop) { - int64_t const mid = (start + stop + 1) / 2; - double const value = InterpolateBetween(left, right, mid, 0.01); - if (check(round(value), current)) { - start = mid; - } else { - stop = mid - 1; - } +template +int64_t SearchBetweenPoints(Point const & left, Point const & right, int64_t const current, Check check) { + int64_t start = left.co.X; + int64_t stop = right.co.X; + while (start < stop) { + int64_t const mid = (start + stop + 1) / 2; + double const value = InterpolateBetween(left, right, mid, 0.01); + if (check(round(value), current)) { + start = mid; + } else { + stop = mid - 1; } - return start; } + return start; } - // Constructor which sets the default point & coordinate at X=1 Keyframe::Keyframe(double value) { // Add initial point @@ -543,6 +550,7 @@ void Keyframe::RemovePoint(int64_t index) { throw OutOfBoundsPoint("Invalid point requested", index, Points.size()); } +// Replace an existing point with a new point void Keyframe::UpdatePoint(int64_t index, Point p) { // Remove matching point RemovePoint(index); diff --git a/src/KeyFrame.h b/src/KeyFrame.h index bcfbf6660..6da34cace 100644 --- a/src/KeyFrame.h +++ b/src/KeyFrame.h @@ -40,6 +40,18 @@ namespace openshot { + /// Check if the X coordinate of a given Point is lower than a given value + bool IsPointBeforeX(Point const & p, double const x); + + /// Linear interpolation between two points + double InterpolateLinearCurve(Point const & left, Point const & right, double const target); + + /// Bezier interpolation between two points + double InterpolateBezierCurve(Point const & 
left, Point const & right, double const target, double const allowed_error); + + /// Interpolate two points using the right Point's interpolation method + double InterpolateBetween(Point const & left, Point const & right, double target, double allowed_error); + /** * @brief A Keyframe is a collection of Point instances, which is used to vary a number or property over time. * @@ -58,8 +70,10 @@ namespace openshot { * \endcode */ class Keyframe { + + private: - std::vector Points; ///< Vector of all Points + std::vector Points; ///< Vector of all Points public: /// Default constructor for the Keyframe class diff --git a/src/OpenShot.h b/src/OpenShot.h index a01a65ae5..f6c80c42c 100644 --- a/src/OpenShot.h +++ b/src/OpenShot.h @@ -146,6 +146,7 @@ #include "CVStabilization.h" #include "CVTracker.h" #include "CVObjectDetection.h" + #include "TrackedObjectBBox.h" #endif #endif diff --git a/src/ProcessingController.h b/src/ProcessingController.h index 98c77888b..24d501e39 100644 --- a/src/ProcessingController.h +++ b/src/ProcessingController.h @@ -2,6 +2,7 @@ * @file * @brief This is a message class for thread safe comunication between ClipProcessingJobs and OpenCV classes * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ diff --git a/src/Timeline.cpp b/src/Timeline.cpp index f7e5f4407..c2aea38d7 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -243,6 +243,115 @@ Timeline::~Timeline() { } } +// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox) +void Timeline::AddTrackedObject(std::shared_ptr trackedObject){ + + // Search for the tracked object on the map + auto iterator = tracked_objects.find(trackedObject->Id()); + + if (iterator != tracked_objects.end()){ + // Tracked object's id already present on the map, overwrite it + iterator->second = trackedObject; + } + else{ + // Tracked object's id not present -> insert it on the map + tracked_objects[trackedObject->Id()] = trackedObject; + } + + return; +} + +// 
Return tracked object pointer by it's id +std::shared_ptr Timeline::GetTrackedObject(std::string id) const{ + + // Search for the tracked object on the map + auto iterator = tracked_objects.find(id); + + if (iterator != tracked_objects.end()){ + // Id found, return the pointer to the tracked object + std::shared_ptr trackedObject = iterator->second; + return trackedObject; + } + else { + // Id not found, return a null pointer + return nullptr; + } +} + +// Return the ID's of the tracked objects as a list of strings +std::list Timeline::GetTrackedObjectsIds() const{ + + // Create a list of strings + std::list trackedObjects_ids; + + // Iterate through the tracked_objects map + for (auto const& it: tracked_objects){ + // Add the IDs to the list + trackedObjects_ids.push_back(it.first); + } + + return trackedObjects_ids; +} + +#ifdef USE_OPENCV +// Return the trackedObject's properties as a JSON string +std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const { + + // Initialize the JSON object + Json::Value trackedObjectJson; + + // Search for the tracked object on the map + auto iterator = tracked_objects.find(id); + + if (iterator != tracked_objects.end()) + { + // Id found, Get the object pointer and cast it as a TrackedObjectBBox + std::shared_ptr trackedObject = std::static_pointer_cast(iterator->second); + + // Get the trackedObject values for it's first frame + if (trackedObject->ExactlyContains(frame_number)){ + BBox box = trackedObject->GetBox(frame_number); + float x1 = box.cx - (box.width/2); + float y1 = box.cy - (box.height/2); + float x2 = box.cx + (box.width/2); + float y2 = box.cy + (box.height/2); + float rotation = box.angle; + + trackedObjectJson["x1"] = x1; + trackedObjectJson["y1"] = y1; + trackedObjectJson["x2"] = x2; + trackedObjectJson["y2"] = y2; + trackedObjectJson["rotation"] = rotation; + + } else { + BBox box = trackedObject->BoxVec.begin()->second; + float x1 = box.cx - (box.width/2); + float y1 = box.cy - 
(box.height/2); + float x2 = box.cx + (box.width/2); + float y2 = box.cy + (box.height/2); + float rotation = box.angle; + + trackedObjectJson["x1"] = x1; + trackedObjectJson["y1"] = y1; + trackedObjectJson["x2"] = x2; + trackedObjectJson["y2"] = y2; + trackedObjectJson["rotation"] = rotation; + } + + } + else { + // Id not found, return all 0 values + trackedObjectJson["x1"] = 0; + trackedObjectJson["y1"] = 0; + trackedObjectJson["x2"] = 0; + trackedObjectJson["y2"] = 0; + trackedObjectJson["rotation"] = 0; + } + + return trackedObjectJson.toStyledString(); +} +#endif + // Add an openshot::Clip to the timeline void Timeline::AddClip(Clip* clip) { @@ -327,6 +436,25 @@ openshot::EffectBase* Timeline::GetClipEffect(const std::string& id) return nullptr; } +// Return the list of effects on all clips +std::list Timeline::ClipEffects() const { + + // Initialize the list + std::list timelineEffectsList; + + // Loop through all clips + for (const auto& clip : clips) { + + // Get the clip's list of effects + std::list clipEffectsList = clip->Effects(); + + // Append the clip's effects to the list + timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end()); + } + + return timelineEffectsList; +} + // Compute the end time of the latest timeline element double Timeline::GetMaxTime() { double last_clip = 0.0; @@ -677,6 +805,18 @@ std::shared_ptr Timeline::GetFrame(int64_t requested_frame) return frame; } + // Check if previous frame was cached? (if not, assume we are seeking somewhere else on the Timeline, and need + // to clear all cache (for continuity sake). For example, jumping back to a previous spot can cause issues with audio + // data where the new jump location doesn't match up with the previously cached audio data. 
+ std::shared_ptr previous_frame = final_cache->GetFrame(requested_frame - 1); + if (!previous_frame) { + // Seeking to new place on timeline (destroy cache) + ClearAllCache(); + } + + // Minimum number of frames to process (for performance reasons) + int minimum_frames = OPEN_MP_NUM_PROCESSORS; + // Get a list of clips that intersect with the requested section of timeline // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing' std::vector nearby_clips; @@ -920,6 +1060,13 @@ void Timeline::SetJsonValue(const Json::Value root) { // Create Clip Clip *c = new Clip(); + // When a clip is attached to an object, it searches for the object + // on it's parent timeline. Setting the parent timeline of the clip here + // allows attaching it to an object when exporting the project (because) + // the exporter script initializes the clip and it's effects + // before setting it's parent timeline. + c->ParentTimeline(this); + // Load Json into Clip c->SetJsonValue(existing_clip); @@ -1386,4 +1533,4 @@ void Timeline::SetMaxSize(int width, int height) { // Update timeline cache size final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, preview_width, preview_height, info.sample_rate, info.channels); -} +} \ No newline at end of file diff --git a/src/Timeline.h b/src/Timeline.h index 3b3b49c3d..3a696bfa4 100644 --- a/src/Timeline.h +++ b/src/Timeline.h @@ -38,6 +38,7 @@ #include #include #include + #include "TimelineBase.h" #include "ReaderBase.h" @@ -47,6 +48,11 @@ #include "Fraction.h" #include "Frame.h" #include "KeyFrame.h" +#ifdef USE_OPENCV +#include "TrackedObjectBBox.h" +#endif +#include "TrackedObjectBase.h" + namespace openshot { @@ -174,6 +180,8 @@ namespace openshot { std::mutex get_frame_mutex; ///< Mutex to protect GetFrame method from different threads calling it int max_concurrent_frames; ///< Max concurrent frames to process at one time + std::map> tracked_objects; ///< map of TrackedObjectBBoxes and their IDs 
+ /// Process a new layer of video or audio void add_layer(std::shared_ptr new_frame, openshot::Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume); @@ -238,6 +246,17 @@ namespace openshot { virtual ~Timeline(); + /// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox) + void AddTrackedObject(std::shared_ptr trackedObject); + /// Return tracked object pointer by it's id + std::shared_ptr GetTrackedObject(std::string id) const; + /// Return the ID's of the tracked objects as a list of strings + std::list GetTrackedObjectsIds() const; + /// Return the trackedObject's properties as a JSON string + #ifdef USE_OPENCV + std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const; + #endif + /// @brief Add an openshot::Clip to the timeline /// @param clip Add an openshot::Clip to the timeline. A clip can contain any type of Reader. void AddClip(openshot::Clip* clip); @@ -284,6 +303,9 @@ namespace openshot { /// Return the list of effects on the timeline std::list Effects() { return effects; }; + /// Return the list of effects on all clips + std::list ClipEffects() const; + /// Get the cache object used by this reader openshot::CacheBase* GetCache() override { return final_cache; }; diff --git a/src/TimelineBase.cpp b/src/TimelineBase.cpp index 4356167f5..120de01b1 100644 --- a/src/TimelineBase.cpp +++ b/src/TimelineBase.cpp @@ -39,3 +39,11 @@ TimelineBase::TimelineBase() preview_width = 1920; preview_height = 1080; } + +/* This function will be overloaded in the Timeline class passing no arguments +* so we'll be able to access the Timeline::Clips() function from a pointer object of +* the TimelineBase class +*/ +void TimelineBase::Clips(int test){ + return; +} \ No newline at end of file diff --git a/src/TimelineBase.h b/src/TimelineBase.h index 6a8d0762c..0fc70c745 100644 --- a/src/TimelineBase.h +++ b/src/TimelineBase.h @@ -59,6 +59,11 @@ namespace openshot { /// Constructor for the base 
timeline TimelineBase(); + + /// This function will be overloaded in the Timeline class passing no arguments + /// so we'll be able to access the Timeline::Clips() function from a pointer object of + /// the TimelineBase class + virtual void Clips(int test); }; } diff --git a/src/TrackedObjectBBox.cpp b/src/TrackedObjectBBox.cpp new file mode 100644 index 000000000..cc8be1872 --- /dev/null +++ b/src/TrackedObjectBBox.cpp @@ -0,0 +1,577 @@ +/** + * @file + * @brief Source file for the TrackedObjectBBox class + * @author Jonathan Thomas + * @author Brenno Caldato + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "TrackedObjectBBox.h" +#include "Clip.h" +#include +#include +#include +#include + +using namespace std; +using namespace openshot; + + +// Default Constructor that sets the bounding-box displacement as 0 and the scales as 1 for the first frame +TrackedObjectBBox::TrackedObjectBBox() : delta_x(0.0), delta_y(0.0), scale_x(1.0), scale_y(1.0), rotation(0.0), + stroke_width(2) , background_alpha(1.0), stroke_alpha(0.0), background_corner(0), + stroke(0,0,255,0), background(0,0,255,0) +{ + this->TimeScale = 1.0; + return; +} + +TrackedObjectBBox::TrackedObjectBBox(int Red, int Green, int Blue, int Alfa) : delta_x(0.0), delta_y(0.0), scale_x(1.0), scale_y(1.0), rotation(0.0), + stroke_width(2) , background_alpha(1.0), stroke_alpha(0.0), background_corner(0), + stroke(Red,Green,Blue,Alfa), background(0,0,255,0) +{ + this->TimeScale = 1.0; + return; +} + +// Add a BBox to the BoxVec map +void TrackedObjectBBox::AddBox(int64_t _frame_num, float _cx, float _cy, float _width, float _height, float _angle) +{ + // Check if the given frame number is valid + if (_frame_num < 0) + return; + + // Instantiate a new bounding-box + BBox newBBox = BBox(_cx, _cy, _width, _height, _angle); + + // Get the time of given frame + double time = this->FrameNToTime(_frame_num, 1.0); + // Create an iterator that points to the BoxVec pair indexed by the time of given frame + auto BBoxIterator = BoxVec.find(time); + + if (BBoxIterator != BoxVec.end()) + { + // There is a bounding-box indexed by the time of given frame, update-it + BBoxIterator->second = newBBox; + } + else + { + // There isn't a bounding-box indexed by the time of given frame, insert a new one + BoxVec.insert({time, newBBox}); + } +} + +// Get the size of BoxVec map +int64_t TrackedObjectBBox::GetLength() const +{ + if (BoxVec.empty()) + return 0; + if (BoxVec.size() == 1) + return 1; + return BoxVec.size(); +} + +// Check if there is a bounding-box in the given frame +bool TrackedObjectBBox::Contains(int64_t 
frame_num) const +{ + // Get the time of given frame + double time = this->FrameNToTime(frame_num, 1.0); + // Create an iterator that points to the BoxVec pair indexed by the time of given frame (or the closest time) + auto it = BoxVec.lower_bound(time); + if (it == BoxVec.end()){ + // BoxVec pair not found + return false; + } + return true; +} + +// Check if there is a bounding-box in the exact frame number +bool TrackedObjectBBox::ExactlyContains(int64_t frame_number) const +{ + // Get the time of given frame + double time = FrameNToTime(frame_number, 1.0); + // Create an iterator that points to the BoxVec pair indexed by the exact time of given frame + auto it = BoxVec.find(time); + if (it == BoxVec.end()){ + // BoxVec pair not found + return false; + } + return true; +} + +// Remove a bounding-box from the BoxVec map +void TrackedObjectBBox::RemoveBox(int64_t frame_number) +{ + // Get the time of given frame + double time = this->FrameNToTime(frame_number, 1.0); + // Create an iterator that points to the BoxVec pair indexed by the time of given frame + auto it = BoxVec.find(time); + if (it != BoxVec.end()) + { + // The BoxVec pair exists, so remove it + BoxVec.erase(time); + } + return; +} + +// Return a bounding-box from BoxVec with it's properties adjusted by the Keyframes +BBox TrackedObjectBBox::GetBox(int64_t frame_number) +{ + // Get the time position of the given frame. + double time = this->FrameNToTime(frame_number, this->TimeScale); + + // Return a iterator pointing to the BoxVec pair indexed by time or to the pair indexed + // by the closest upper time value. + auto currentBBoxIterator = BoxVec.lower_bound(time); + + // Check if there is a pair indexed by time, returns an empty bbox if there isn't. 
+ if (currentBBoxIterator == BoxVec.end()) + { + // Create and return an empty bounding-box object + BBox emptyBBox; + return emptyBBox; + } + + // Check if the iterator matches a BBox indexed by time or points to the first element of BoxVec + if ((currentBBoxIterator->first == time) || (currentBBoxIterator == BoxVec.begin())) + { + // Get the BBox indexed by time + BBox currentBBox = currentBBoxIterator->second; + + // Adjust the BBox properties by the Keyframes values + currentBBox.cx += this->delta_x.GetValue(frame_number); + currentBBox.cy += this->delta_y.GetValue(frame_number); + currentBBox.width *= this->scale_x.GetValue(frame_number); + currentBBox.height *= this->scale_y.GetValue(frame_number); + currentBBox.angle += this->rotation.GetValue(frame_number); + + return currentBBox; + } + + // BBox indexed by the closest upper time + BBox currentBBox = currentBBoxIterator->second; + // BBox indexed by the closet lower time + BBox previousBBox = prev(currentBBoxIterator, 1)->second; + + // Interpolate a BBox in the middle of previousBBox and currentBBox + BBox interpolatedBBox = InterpolateBoxes(prev(currentBBoxIterator, 1)->first, currentBBoxIterator->first, + previousBBox, currentBBox, time); + + // Adjust the BBox properties by the Keyframes values + interpolatedBBox.cx += this->delta_x.GetValue(frame_number); + interpolatedBBox.cy += this->delta_y.GetValue(frame_number); + interpolatedBBox.width *= this->scale_x.GetValue(frame_number); + interpolatedBBox.height *= this->scale_y.GetValue(frame_number); + interpolatedBBox.angle += this->rotation.GetValue(frame_number); + + return interpolatedBBox; +} + +// Interpolate the bouding-boxes properties +BBox TrackedObjectBBox::InterpolateBoxes(double t1, double t2, BBox left, BBox right, double target) +{ + // Interpolate the x-coordinate of the center point + Point cx_left(t1, left.cx, openshot::InterpolationType::LINEAR); + Point cx_right(t2, right.cx, openshot::InterpolationType::LINEAR); + Point cx = 
InterpolateBetween(cx_left, cx_right, target, 0.01); + + // Interpolate de y-coordinate of the center point + Point cy_left(t1, left.cy, openshot::InterpolationType::LINEAR); + Point cy_right(t2, right.cy, openshot::InterpolationType::LINEAR); + Point cy = InterpolateBetween(cy_left, cy_right, target, 0.01); + + // Interpolate the width + Point width_left(t1, left.width, openshot::InterpolationType::LINEAR); + Point width_right(t2, right.width, openshot::InterpolationType::LINEAR); + Point width = InterpolateBetween(width_left, width_right, target, 0.01); + + // Interpolate the height + Point height_left(t1, left.height, openshot::InterpolationType::LINEAR); + Point height_right(t2, right.height, openshot::InterpolationType::LINEAR); + Point height = InterpolateBetween(height_left, height_right, target, 0.01); + + // Interpolate the rotation angle + Point angle_left(t1, left.angle, openshot::InterpolationType::LINEAR); + Point angle_right(t1, right.angle, openshot::InterpolationType::LINEAR); + Point angle = InterpolateBetween(angle_left, angle_right, target, 0.01); + + // Create a bounding box with the interpolated points + BBox interpolatedBox(cx.co.Y, cy.co.Y, width.co.Y, height.co.Y, angle.co.Y); + + return interpolatedBox; +} + +// Update object's BaseFps +void TrackedObjectBBox::SetBaseFPS(Fraction fps){ + this->BaseFps = fps; + return; +} + +// Return the object's BaseFps +Fraction TrackedObjectBBox::GetBaseFPS(){ + return BaseFps; +} + +// Get the time of the given frame +double TrackedObjectBBox::FrameNToTime(int64_t frame_number, double time_scale) const{ + double time = ((double)frame_number) * this->BaseFps.Reciprocal().ToDouble() * (1.0 / time_scale); + + return time; +} + +// Update the TimeScale member variable +void TrackedObjectBBox::ScalePoints(double time_scale){ + this->TimeScale = time_scale; +} + +// Load the bounding-boxes information from the protobuf file +bool TrackedObjectBBox::LoadBoxData(std::string inputFilePath) +{ + // Variable to 
hold the loaded data + pb_tracker::Tracker bboxMessage; + + // Read the existing tracker message. + fstream input(inputFilePath, ios::in | ios::binary); + + // Check if it was able to read the protobuf data + if (!bboxMessage.ParseFromIstream(&input)) + { + cerr << "Failed to parse protobuf message." << endl; + return false; + } + + this->clear(); + + // Iterate over all frames of the saved message + for (size_t i = 0; i < bboxMessage.frame_size(); i++) + { + // Get data of the i-th frame + const pb_tracker::Frame &pbFrameData = bboxMessage.frame(i); + + // Get frame number + size_t frame_number = pbFrameData.id(); + + // Get bounding box data from current frame + const pb_tracker::Frame::Box &box = pbFrameData.bounding_box(); + + float width = box.x2() - box.x1(); + float height = box.y2() - box.y1(); + float cx = box.x1() + width/2; + float cy = box.y1() + height/2; + float angle = 0.0; + + + if ( (cx >= 0.0) && (cy >= 0.0) && (width >= 0.0) && (height >= 0.0) ) + { + // The bounding-box properties are valid, so add it to the BoxVec map + this->AddBox(frame_number, cx, cy, width, height, angle); + } + } + + // Show the time stamp from the last update in tracker data file + if (bboxMessage.has_last_updated()) + { + cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(bboxMessage.last_updated()) << endl; + } + + // Delete all global objects allocated by libprotobuf. 
+ google::protobuf::ShutdownProtobufLibrary(); + + return true; +} + +// Clear the BoxVec map +void TrackedObjectBBox::clear() +{ + BoxVec.clear(); +} + +// Generate JSON string of this object +std::string TrackedObjectBBox::Json() const +{ + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::Value for this object +Json::Value TrackedObjectBBox::JsonValue() const +{ + // Create root json object + Json::Value root; + + // Object's properties + root["box_id"] = Id(); + root["BaseFPS"]["num"] = BaseFps.num; + root["BaseFPS"]["den"] = BaseFps.den; + root["TimeScale"] = TimeScale; + root["child_clip_id"] = ChildClipId(); + + // Keyframe's properties + root["delta_x"] = delta_x.JsonValue(); + root["delta_y"] = delta_y.JsonValue(); + root["scale_x"] = scale_x.JsonValue(); + root["scale_y"] = scale_y.JsonValue(); + root["rotation"] = rotation.JsonValue(); + root["visible"] = visible.JsonValue(); + root["draw_box"] = draw_box.JsonValue(); + root["stroke"] = stroke.JsonValue(); + root["background_alpha"] = background_alpha.JsonValue(); + root["background_corner"] = background_corner.JsonValue(); + root["background"] = background.JsonValue(); + root["stroke_width"] = stroke_width.JsonValue(); + root["stroke_alpha"] = stroke_alpha.JsonValue(); + + // return JsonValue + return root; +} + +// Load JSON string into this object +void TrackedObjectBBox::SetJson(const std::string value) +{ + // Parse JSON string into JSON objects + try + { + const Json::Value root = openshot::stringToJson(value); + // Set all values that match + SetJsonValue(root); + } + catch (const std::exception &e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); + } + return; +} + +// Load Json::Value into this object +void TrackedObjectBBox::SetJsonValue(const Json::Value root) +{ + + // Set the Id by the given JSON object + if (!root["box_id"].isNull() && root["box_id"].asString() != "") + 
Id(root["box_id"].asString()); + + // Set the BaseFps by the given JSON object + if (!root["BaseFPS"].isNull() && root["BaseFPS"].isObject()) + { + if (!root["BaseFPS"]["num"].isNull()) + BaseFps.num = (int)root["BaseFPS"]["num"].asInt(); + if (!root["BaseFPS"]["den"].isNull()) + BaseFps.den = (int)root["BaseFPS"]["den"].asInt(); + } + // Set the TimeScale by the given JSON object + if (!root["TimeScale"].isNull()) + { + double scale = (double)root["TimeScale"].asDouble(); + this->ScalePoints(scale); + } + // Set the protobuf data path by the given JSON object + if (!root["protobuf_data_path"].isNull()) + protobufDataPath = root["protobuf_data_path"].asString(); + // Set the id of the child clip + if (!root["child_clip_id"].isNull() && root["child_clip_id"].asString() != ""){ + Clip* parentClip = (Clip *) ParentClip(); + ChildClipId(root["child_clip_id"].asString()); + } + + // Set the Keyframes by the given JSON object + if (!root["delta_x"].isNull()) + delta_x.SetJsonValue(root["delta_x"]); + if (!root["delta_y"].isNull()) + delta_y.SetJsonValue(root["delta_y"]); + if (!root["scale_x"].isNull()) + scale_x.SetJsonValue(root["scale_x"]); + if (!root["scale_y"].isNull()) + scale_y.SetJsonValue(root["scale_y"]); + if (!root["rotation"].isNull()) + rotation.SetJsonValue(root["rotation"]); + if (!root["visible"].isNull()) + visible.SetJsonValue(root["visible"]); + if (!root["draw_box"].isNull()) + draw_box.SetJsonValue(root["draw_box"]); + if (!root["stroke"].isNull()) + stroke.SetJsonValue(root["stroke"]); + if (!root["background_alpha"].isNull()) + background_alpha.SetJsonValue(root["background_alpha"]); + if (!root["background_corner"].isNull()) + background_corner.SetJsonValue(root["background_corner"]); + if (!root["background"].isNull()) + background.SetJsonValue(root["background"]); + if (!root["stroke_width"].isNull()) + stroke_width.SetJsonValue(root["stroke_width"]); + if (!root["stroke_alpha"].isNull()) + stroke_alpha.SetJsonValue(root["stroke_alpha"]); + 
return; +} + +// Get all properties for a specific frame (perfect for a UI to display the current state +// of all properties at any time) +Json::Value TrackedObjectBBox::PropertiesJSON(int64_t requested_frame) const +{ + Json::Value root; + + BBox box = GetBox(requested_frame); + + // Add the ID of this object to the JSON object + root["box_id"] = add_property_json("Box ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + + // Add the ID of this object's child clip to the JSON object + root["child_clip_id"] = add_property_json("Child Clip ID", 0.0, "string", ChildClipId(), NULL, -1, -1, false, requested_frame); + + // Add the data of given frame bounding-box to the JSON object + root["x1"] = add_property_json("X1", box.cx-(box.width/2), "float", "", NULL, 0.0, 1.0, false, requested_frame); + root["y1"] = add_property_json("Y1", box.cy-(box.height/2), "float", "", NULL, 0.0, 1.0, false, requested_frame); + root["x2"] = add_property_json("X2", box.cx+(box.width/2), "float", "", NULL, 0.0, 1.0, false, requested_frame); + root["y2"] = add_property_json("Y2", box.cy+(box.height/2), "float", "", NULL, 0.0, 1.0, false, requested_frame); + + // Add the bounding-box Keyframes to the JSON object + root["delta_x"] = add_property_json("Displacement X-axis", delta_x.GetValue(requested_frame), "float", "", &delta_x, -1.0, 1.0, false, requested_frame); + root["delta_y"] = add_property_json("Displacement Y-axis", delta_y.GetValue(requested_frame), "float", "", &delta_y, -1.0, 1.0, false, requested_frame); + root["scale_x"] = add_property_json("Scale (Width)", scale_x.GetValue(requested_frame), "float", "", &scale_x, 0.0, 1.0, false, requested_frame); + root["scale_y"] = add_property_json("Scale (Height)", scale_y.GetValue(requested_frame), "float", "", &scale_y, 0.0, 1.0, false, requested_frame); + root["rotation"] = add_property_json("Rotation", rotation.GetValue(requested_frame), "float", "", &rotation, 0, 360, false, requested_frame); + root["visible"] = 
add_property_json("Visible", visible.GetValue(requested_frame), "int", "", &visible, 0, 1, false, requested_frame); + + root["draw_box"] = add_property_json("Draw Box", draw_box.GetValue(requested_frame), "int", "", &draw_box, -1, 1.0, false, requested_frame); + root["draw_box"]["choices"].append(add_property_choice_json("Off", 0, draw_box.GetValue(requested_frame))); + root["draw_box"]["choices"].append(add_property_choice_json("On", 1, draw_box.GetValue(requested_frame))); + + root["stroke"] = add_property_json("Border", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["stroke"]["red"] = add_property_json("Red", stroke.red.GetValue(requested_frame), "float", "", &stroke.red, 0, 255, false, requested_frame); + root["stroke"]["blue"] = add_property_json("Blue", stroke.blue.GetValue(requested_frame), "float", "", &stroke.blue, 0, 255, false, requested_frame); + root["stroke"]["green"] = add_property_json("Green", stroke.green.GetValue(requested_frame), "float", "", &stroke.green, 0, 255, false, requested_frame); + root["stroke_width"] = add_property_json("Stroke Width", stroke_width.GetValue(requested_frame), "int", "", &stroke_width, 1, 10, false, requested_frame); + root["stroke_alpha"] = add_property_json("Stroke alpha", stroke_alpha.GetValue(requested_frame), "float", "", &stroke_alpha, 0.0, 1.0, false, requested_frame); + + root["background_alpha"] = add_property_json("Background Alpha", background_alpha.GetValue(requested_frame), "float", "", &background_alpha, 0.0, 1.0, false, requested_frame); + root["background_corner"] = add_property_json("Background Corner Radius", background_corner.GetValue(requested_frame), "int", "", &background_corner, 0.0, 60.0, false, requested_frame); + + root["background"] = add_property_json("Background", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["background"]["red"] = add_property_json("Red", background.red.GetValue(requested_frame), "float", "", &background.red, 0, 255, false, 
requested_frame); + root["background"]["blue"] = add_property_json("Blue", background.blue.GetValue(requested_frame), "float", "", &background.blue, 0, 255, false, requested_frame); + root["background"]["green"] = add_property_json("Green", background.green.GetValue(requested_frame), "float", "", &background.green, 0, 255, false, requested_frame); + + // Return formatted string + return root; +} + + +// Generate JSON for a property +Json::Value TrackedObjectBBox::add_property_json(std::string name, float value, std::string type, std::string memo, const Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) const { + + // Requested Point + const Point requested_point(requested_frame, requested_frame); + + // Create JSON Object + Json::Value prop = Json::Value(Json::objectValue); + prop["name"] = name; + prop["value"] = value; + prop["memo"] = memo; + prop["type"] = type; + prop["min"] = min_value; + prop["max"] = max_value; + if (keyframe) { + prop["keyframe"] = keyframe->Contains(requested_point); + prop["points"] = int(keyframe->GetCount()); + Point closest_point = keyframe->GetClosestPoint(requested_point); + prop["interpolation"] = closest_point.interpolation; + prop["closest_point_x"] = closest_point.co.X; + prop["previous_point_x"] = keyframe->GetPreviousPoint(closest_point).co.X; + } + else { + prop["keyframe"] = false; + prop["points"] = 0; + prop["interpolation"] = CONSTANT; + prop["closest_point_x"] = -1; + prop["previous_point_x"] = -1; + } + + prop["readonly"] = readonly; + prop["choices"] = Json::Value(Json::arrayValue); + + // return JsonValue + return prop; +} + +// Return a map that contains the bounding box properties and it's keyframes indexed by their names +std::map TrackedObjectBBox::GetBoxValues(int64_t frame_number) const { + + // Create the map + std::map boxValues; + + // Get bounding box of the current frame + BBox box = GetBox(frame_number); + + // Save the bounding box properties + boxValues["cx"] = 
box.cx; + boxValues["cy"] = box.cy; + boxValues["w"] = box.width; + boxValues["h"] = box.height; + boxValues["ang"] = box.angle; + + // Save the keyframes values + boxValues["sx"] = this->scale_x.GetValue(frame_number); + boxValues["sy"] = this->scale_y.GetValue(frame_number); + boxValues["dx"] = this->delta_x.GetValue(frame_number); + boxValues["dy"] = this->delta_y.GetValue(frame_number); + boxValues["r"] = this->rotation.GetValue(frame_number); + + + return boxValues; +} + +// Return a map that contains the properties of this object's parent clip +std::map TrackedObjectBBox::GetParentClipProperties(int64_t frame_number) const { + + // Get the parent clip of this object as a Clip pointer + Clip* parentClip = (Clip *) ParentClip(); + + // Calculate parentClip's frame number + long parentClip_start_position = round( parentClip->Position() * parentClip->info.fps.ToDouble() ) + 1; + long parentClip_start_frame = ( parentClip->Start() * parentClip->info.fps.ToDouble() ) + 1; + float parentClip_frame_number = round(frame_number - parentClip_start_position) + parentClip_start_frame; + + // Get parentClip's Keyframes + float parentClip_location_x = parentClip->location_x.GetValue(parentClip_frame_number); + float parentClip_location_y = parentClip->location_y.GetValue(parentClip_frame_number); + float parentClip_scale_x = parentClip->scale_x.GetValue(parentClip_frame_number); + float parentClip_scale_y = parentClip->scale_y.GetValue(parentClip_frame_number); + float parentClip_rotation = parentClip->rotation.GetValue(parentClip_frame_number); + + // Initialize the parent clip properties map + std::map parentClipProperties; + + // Set the map properties + parentClipProperties["frame_number"] = parentClip_frame_number; + parentClipProperties["timeline_frame_number"] = frame_number; + parentClipProperties["location_x"] = parentClip_location_x; + parentClipProperties["location_y"] = parentClip_location_y; + parentClipProperties["scale_x"] = parentClip_scale_x; + 
parentClipProperties["scale_y"] = parentClip_scale_y; + parentClipProperties["rotation"] = parentClip_rotation; + + return parentClipProperties; +} \ No newline at end of file diff --git a/src/TrackedObjectBBox.h b/src/TrackedObjectBBox.h new file mode 100644 index 000000000..ae951e220 --- /dev/null +++ b/src/TrackedObjectBBox.h @@ -0,0 +1,249 @@ +/** + * @file + * @brief Header file for the TrackedObjectBBox class + * @author Jonathan Thomas + * @author Brenno Caldato + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_TRACKEDOBJECTBBOX_H +#define OPENSHOT_TRACKEDOBJECTBBOX_H + +#include +#include +#include +#include +#include +#include "Exceptions.h" +#include "Fraction.h" +#include "Coordinate.h" +#include "Point.h" +#include "Json.h" +#include "KeyFrame.h" +#include "TrackedObjectBase.h" +#include "Color.h" +#include "protobuf_messages/trackerdata.pb.h" +#include + + +using google::protobuf::util::TimeUtil; + +namespace openshot +{ + /** + * @brief This struct holds the information of a bounding-box. 
* A bounding-box is a rectangular shape that encloses an
SetJsonValue(const Json::Value root) + { + + // Set data from Json (if key is found) + if (!root["cx"].isNull()) + cx = root["cx"].asDouble(); + if (!root["cy"].isNull()) + cy = root["cy"].asDouble(); + if (!root["width"].isNull()) + width = root["width"].asDouble(); + if (!root["height"].isNull()) + height = root["height"].asDouble(); + if (!root["angle"].isNull()) + angle = root["angle"].asDouble(); + } + }; + + /** + * @brief This class contains the properties of a tracked object + * and functions to manipulate it. + * + * The bounding-box displacement in X and Y directions, it's width, + * height and rotation variation over the frames are set as + * openshot::Keyframe objects. + * + * The bounding-box information over the clip's frames are + * saved into a protobuf file and loaded into an + * object of this class. + */ + class TrackedObjectBBox : public TrackedObjectBase + { + private: + Fraction BaseFps; + double TimeScale; + + public: + std::map BoxVec; ///< Index the bounding-box by time of each frame + Keyframe delta_x; ///< X-direction displacement Keyframe + Keyframe delta_y; ///< Y-direction displacement Keyframe + Keyframe scale_x; ///< X-direction scale Keyframe + Keyframe scale_y; ///< Y-direction scale Keyframe + Keyframe rotation; ///< Rotation Keyframe + Keyframe background_alpha; ///< Background box opacity + Keyframe background_corner; ///< Radius of rounded corners + Keyframe stroke_width; ///< Thickness of border line + Keyframe stroke_alpha; ///< Stroke box opacity + Color stroke; ///< Border line color + Color background; ///< Background fill color + + std::string protobufDataPath; ///< Path to the protobuf file that holds the bounding box points across the frames + + /// Default Constructor + TrackedObjectBBox(); + TrackedObjectBBox(int Red, int Green, int Blue, int Alfa); + + /// Add a BBox to the BoxVec map + void AddBox(int64_t _frame_num, float _cx, float _cy, float _width, float _height, float _angle) override; + + /// Update object's 
BaseFps + void SetBaseFPS(Fraction fps); + + /// Return the object's BaseFps + Fraction GetBaseFPS(); + + /// Update the TimeScale member variable + void ScalePoints(double scale) override; + + /// Check if there is a bounding-box in the given frame + bool Contains(int64_t frame_number) const; + /// Check if there is a bounding-box in the exact frame number + bool ExactlyContains(int64_t frame_number) const override; + + /// Get the size of BoxVec map + int64_t GetLength() const; + + /// Remove a bounding-box from the BoxVec map + void RemoveBox(int64_t frame_number); + + /// Return a bounding-box from BoxVec with it's properties adjusted by the Keyframes + BBox GetBox(int64_t frame_number); + /// Const-cast of the GetBox function, so that it can be called inside other cont function + BBox GetBox(int64_t frame_number) const + { + return const_cast(this)->GetBox(frame_number); + } + + /// Load the bounding-boxes information from the protobuf file + bool LoadBoxData(std::string inputFilePath); + + /// Get the time of the given frame + double FrameNToTime(int64_t frame_number, double time_scale) const; + + /// Interpolate the bouding-boxes properties + BBox InterpolateBoxes(double t1, double t2, BBox left, BBox right, double target); + + /// Clear the BoxVec map + void clear(); + + /// Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJson(const std::string value) override; ///< Load JSON string into this object + void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + Json::Value PropertiesJSON(int64_t requested_frame) const override; + + // Generate JSON for a property + Json::Value add_property_json(std::string name, float value, std::string type, 
std::string memo, const Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) const; + + /// Return a map that contains the bounding box properties and it's keyframes indexed by their names + std::map GetBoxValues(int64_t frame_number) const override; + /// Return a map that contains the properties of this object's parent clip + std::map GetParentClipProperties(int64_t frame_number) const override; + + }; +} // namespace openshot + +#endif \ No newline at end of file diff --git a/src/TrackedObjectBase.cpp b/src/TrackedObjectBase.cpp new file mode 100644 index 000000000..f065b10a9 --- /dev/null +++ b/src/TrackedObjectBase.cpp @@ -0,0 +1,66 @@ +/** + * @file + * @brief Source file for the TrackedObjectBase class + * @author Jonathan Thomas + * @author Brenno Caldato + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "TrackedObjectBase.h" +#include +#include +#include + +namespace openshot +{ + + // Blank constructor + TrackedObjectBase::TrackedObjectBase() : visible(1.0), draw_box(1) + { + // Initializes the id as "" + id = ""; + childClipId = ""; + } + + // Default constructor + TrackedObjectBase::TrackedObjectBase(std::string _id) : visible(1.0) + { + Id(_id); + childClipId = ""; + } + + Json::Value TrackedObjectBase::add_property_choice_json(std::string name, int value, int selected_value) const + { + // Create choice + Json::Value new_choice = Json::Value(Json::objectValue); + new_choice["name"] = name; + new_choice["value"] = value; + new_choice["selected"] = (value == selected_value); + + // return JsonValue + return new_choice; + } +} // namespace openshot \ No newline at end of file diff --git a/src/TrackedObjectBase.h b/src/TrackedObjectBase.h new file mode 100644 index 000000000..86278e637 --- /dev/null +++ b/src/TrackedObjectBase.h @@ -0,0 +1,116 @@ +/** + * @file + * @brief Header file for the TrackedObjectBase class + * @author Jonathan Thomas + * @author Brenno Caldato + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_TRACKEDOBJECTBASE_H +#define OPENSHOT_TRACKEDOBJECTBASE_H + +#include +#include +#include +#include +#include +#include +#include "Exceptions.h" +#include "Fraction.h" +#include "Coordinate.h" +#include "KeyFrame.h" +#include "Point.h" +#include "Json.h" +#include "ClipBase.h" + + +namespace openshot { + /** + * @brief This abstract class is the base class of all Tracked Objects. + * + * A Tracked Object is an object or a desired set of pixels in a digital image + * which properties (such as position, width and height) can be detected and + * predicted along the frames of a clip. + */ + class TrackedObjectBase { + protected: + std::string id; + std::string childClipId; + + ClipBase* parentClip; + + public: + + Keyframe visible; + Keyframe draw_box; + + /// Default constructor + TrackedObjectBase(); + + /// Constructor which takes an object ID + TrackedObjectBase(std::string _id); + + /// Get the id of this object + std::string Id() const { return id; } + /// Set the id of this object + void Id(std::string _id) { id = _id; } + /// Get and set the parentClip of this object + ClipBase* ParentClip() const { return parentClip; } + void ParentClip(ClipBase* clip) { parentClip = clip; } + /// Get and set the Id of the childClip of this object + std::string ChildClipId() const { return childClipId; }; + void ChildClipId(std::string _childClipId) { childClipId = _childClipId; }; + + /// Check if there is data for the exact frame number + virtual bool ExactlyContains(int64_t frame_number) const { return {}; }; + + /// Scale an object's property + virtual void ScalePoints(double scale) { return; }; + /// Return the main properties of a TrackedObjectBBox instance - such as position, size and rotation + virtual std::map GetBoxValues(int64_t frame_number) const { std::map ret; return ret; }; + /// Return the main properties of 
the tracked object's parent clip - such as position, size and rotation + virtual std::map GetParentClipProperties(int64_t frame_number) const { std::map ret; return ret; } + /// Add a bounding box to the tracked object's BoxVec map + virtual void AddBox(int64_t _frame_num, float _cx, float _cy, float _width, float _height, float _angle) { return; }; + + + /// Get and Set JSON methods + virtual std::string Json() const = 0; ///< Generate JSON string of this object + virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object + virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object + virtual void SetJsonValue(const Json::Value root) = 0; ///< Load Json::Value into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + virtual Json::Value PropertiesJSON(int64_t requested_frame) const = 0; + /// Generate JSON choice for a property (dropdown properties) + Json::Value add_property_choice_json(std::string name, int value, int selected_value) const; + + + }; +} // Namespace openshot + +#endif diff --git a/src/effects/Bars.cpp b/src/effects/Bars.cpp index 682b864db..6f80b94eb 100644 --- a/src/effects/Bars.cpp +++ b/src/effects/Bars.cpp @@ -196,6 +196,9 @@ std::string Bars::PropertiesJSON(int64_t requested_frame) const { root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 0.5, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Blur.cpp b/src/effects/Blur.cpp index 
b301f1e85..169286cba 100644 --- a/src/effects/Blur.cpp +++ b/src/effects/Blur.cpp @@ -243,6 +243,9 @@ std::string Blur::PropertiesJSON(int64_t requested_frame) const { root["sigma"] = add_property_json("Sigma", sigma.GetValue(requested_frame), "float", "", &sigma, 0, 100, false, requested_frame); root["iterations"] = add_property_json("Iterations", iterations.GetValue(requested_frame), "float", "", &iterations, 0, 100, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Brightness.cpp b/src/effects/Brightness.cpp index dbb80f376..4b80f9d78 100644 --- a/src/effects/Brightness.cpp +++ b/src/effects/Brightness.cpp @@ -163,6 +163,9 @@ std::string Brightness::PropertiesJSON(int64_t requested_frame) const { root["brightness"] = add_property_json("Brightness", brightness.GetValue(requested_frame), "float", "", &brightness, -1.0, 1.0, false, requested_frame); root["contrast"] = add_property_json("Contrast", contrast.GetValue(requested_frame), "float", "", &contrast, 0.0, 100.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp index 83951d755..8d9bec92e 100644 --- a/src/effects/Caption.cpp +++ b/src/effects/Caption.cpp @@ -429,6 +429,9 @@ std::string Caption::PropertiesJSON(int64_t requested_frame) const { root["caption_text"] = add_property_json("Captions", 0.0, "caption", caption_text, NULL, -1, -1, false, requested_frame); root["caption_font"] = add_property_json("Font", 0.0, "font", font_name, NULL, -1, -1, false, 
requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/ChromaKey.cpp b/src/effects/ChromaKey.cpp index 9895d9366..7eb4aa34a 100644 --- a/src/effects/ChromaKey.cpp +++ b/src/effects/ChromaKey.cpp @@ -176,6 +176,9 @@ std::string ChromaKey::PropertiesJSON(int64_t requested_frame) const { root["color"]["green"] = add_property_json("Green", color.green.GetValue(requested_frame), "float", "", &color.green, 0, 255, false, requested_frame); root["fuzz"] = add_property_json("Fuzz", fuzz.GetValue(requested_frame), "float", "", &fuzz, 0, 125, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/ColorShift.cpp b/src/effects/ColorShift.cpp index 1140d0253..6b1323e3f 100644 --- a/src/effects/ColorShift.cpp +++ b/src/effects/ColorShift.cpp @@ -282,6 +282,9 @@ std::string ColorShift::PropertiesJSON(int64_t requested_frame) const { root["alpha_x"] = add_property_json("Alpha X Shift", alpha_x.GetValue(requested_frame), "float", "", &alpha_x, -1, 1, false, requested_frame); root["alpha_y"] = add_property_json("Alpha Y Shift", alpha_y.GetValue(requested_frame), "float", "", &alpha_y, -1, 1, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Crop.cpp b/src/effects/Crop.cpp index 6b3c6f8a6..bf89b2575 100644 --- 
a/src/effects/Crop.cpp +++ b/src/effects/Crop.cpp @@ -189,6 +189,9 @@ std::string Crop::PropertiesJSON(int64_t requested_frame) const { root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 1.0, false, requested_frame); root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 1.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Deinterlace.cpp b/src/effects/Deinterlace.cpp index 6d4dfa152..870f4a887 100644 --- a/src/effects/Deinterlace.cpp +++ b/src/effects/Deinterlace.cpp @@ -162,6 +162,9 @@ std::string Deinterlace::PropertiesJSON(int64_t requested_frame) const { root["isOdd"]["choices"].append(add_property_choice_json("Yes", true, isOdd)); root["isOdd"]["choices"].append(add_property_choice_json("No", false, isOdd)); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Hue.cpp b/src/effects/Hue.cpp index f2c50c62b..29db7c53e 100644 --- a/src/effects/Hue.cpp +++ b/src/effects/Hue.cpp @@ -164,6 +164,9 @@ std::string Hue::PropertiesJSON(int64_t requested_frame) const { // Keyframes root["hue"] = add_property_json("Hue", hue.GetValue(requested_frame), "float", "", &hue, 0.0, 1.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff 
--git a/src/effects/Mask.cpp b/src/effects/Mask.cpp index 75e38a6ad..0d5d335e0 100644 --- a/src/effects/Mask.cpp +++ b/src/effects/Mask.cpp @@ -293,6 +293,9 @@ std::string Mask::PropertiesJSON(int64_t requested_frame) const { else root["reader"] = add_property_json("Source", 0.0, "reader", "{}", NULL, 0, 1, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Negate.cpp b/src/effects/Negate.cpp index c6772b31d..50db5f2f9 100644 --- a/src/effects/Negate.cpp +++ b/src/effects/Negate.cpp @@ -113,6 +113,9 @@ std::string Negate::PropertiesJSON(int64_t requested_frame) const { root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 30 * 60 * 60 * 48, false, requested_frame); root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 30 * 60 * 60 * 48, true, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/ObjectDetection.cpp b/src/effects/ObjectDetection.cpp index 8f5d3ca14..130934757 100644 --- a/src/effects/ObjectDetection.cpp +++ b/src/effects/ObjectDetection.cpp @@ -2,6 +2,7 @@ * @file * @brief Source file for Object Detection effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -34,40 +35,51 @@ #include "effects/ObjectDetection.h" #include "effects/Tracker.h" #include "Exceptions.h" +#include "Timeline.h" +#include +#include +#include using namespace std; using namespace openshot; + /// Blank constructor, useful when using Json to load the effect properties 
ObjectDetection::ObjectDetection(std::string clipObDetectDataPath) { // Init effect properties - init_effect_details(); + init_effect_details(); // Tries to load the tracker data from protobuf LoadObjDetectdData(clipObDetectDataPath); + + // Initialize the selected object index as the first object index + selectedObjectIndex = trackedObjects.begin()->first; } // Default constructor ObjectDetection::ObjectDetection() { - // Init effect properties - init_effect_details(); + // Init effect properties + init_effect_details(); + // Initialize the selected object index as the first object index + selectedObjectIndex = trackedObjects.begin()->first; } // Init effect settings void ObjectDetection::init_effect_details() { - /// Initialize the values of the EffectInfo struct. - InitEffectInfo(); - - /// Set the effect info - info.class_name = "Object Detector"; - info.name = "Object Detector"; - info.description = "Detect objects through the video."; - info.has_audio = false; - info.has_video = true; + /// Initialize the values of the EffectInfo struct. 
+ InitEffectInfo(); + + /// Set the effect info + info.class_name = "Object Detector"; + info.name = "Object Detector"; + info.description = "Detect objects through the video."; + info.has_audio = false; + info.has_video = true; + info.has_tracked_object = true; } // This method is required for all derived classes of EffectBase, and returns a @@ -75,13 +87,18 @@ void ObjectDetection::init_effect_details() std::shared_ptr ObjectDetection::GetFrame(std::shared_ptr frame, int64_t frame_number) { // Get the frame's image - cv::Mat cv_image = frame->GetImageCV(); + cv::Mat cv_image = frame->GetImageCV(); // Check if frame isn't NULL if(cv_image.empty()){ return frame; } + // Initialize the Qt rectangle that will hold the positions of the bounding-box + std::vector boxRects; + // Initialize the image of the TrackedObject child clip + std::vector> childClipImages; + // Check if track data exists for the requested frame if (detectionsData.find(frame_number) != detectionsData.end()) { float fw = cv_image.size().width; @@ -89,46 +106,191 @@ std::shared_ptr ObjectDetection::GetFrame(std::shared_ptr frame, i DetectionData detections = detectionsData[frame_number]; for(int i = 0; i bb_nrml = detections.boxes.at(i); - cv::Rect2d box((int)(bb_nrml.x*fw), - (int)(bb_nrml.y*fh), - (int)(bb_nrml.width*fw), - (int)(bb_nrml.height*fh)); - drawPred(detections.classIds.at(i), detections.confidences.at(i), - box, cv_image); + + // Does not show boxes with confidence below the threshold + if(detections.confidences.at(i) < confidence_threshold){ + continue; + } + // Just display selected classes + if( display_classes.size() > 0 && + std::find(display_classes.begin(), display_classes.end(), classNames[detections.classIds.at(i)]) == display_classes.end()){ + continue; + } + + // Get the object id + int objectId = detections.objectIds.at(i); + + // Search for the object in the trackedObjects map + auto trackedObject_it = trackedObjects.find(objectId); + + // Cast the object as 
TrackedObjectBBox + std::shared_ptr trackedObject = std::static_pointer_cast(trackedObject_it->second); + + // Check if the tracked object has data for this frame + if (trackedObject->Contains(frame_number) && + trackedObject->visible.GetValue(frame_number) == 1) + { + // Get the bounding-box of given frame + BBox trackedBox = trackedObject->GetBox(frame_number); + bool draw_text = !display_box_text.GetValue(frame_number); + std::vector stroke_rgba = trackedObject->stroke.GetColorRGBA(frame_number); + int stroke_width = trackedObject->stroke_width.GetValue(frame_number); + float stroke_alpha = trackedObject->stroke_alpha.GetValue(frame_number); + std::vector bg_rgba = trackedObject->background.GetColorRGBA(frame_number); + float bg_alpha = trackedObject->background_alpha.GetValue(frame_number); + + // Create a rotated rectangle object that holds the bounding box + // cv::RotatedRect box ( cv::Point2f( (int)(trackedBox.cx*fw), (int)(trackedBox.cy*fh) ), + // cv::Size2f( (int)(trackedBox.width*fw), (int)(trackedBox.height*fh) ), + // (int) (trackedBox.angle) ); + + // DrawRectangleRGBA(cv_image, box, bg_rgba, bg_alpha, 1, true); + // DrawRectangleRGBA(cv_image, box, stroke_rgba, stroke_alpha, stroke_width, false); + + cv::Rect2d box( + (int)( (trackedBox.cx-trackedBox.width/2)*fw), + (int)( (trackedBox.cy-trackedBox.height/2)*fh), + (int)( trackedBox.width*fw), + (int)( trackedBox.height*fh) + ); + drawPred(detections.classIds.at(i), detections.confidences.at(i), + box, cv_image, detections.objectIds.at(i), bg_rgba, bg_alpha, 1, true, draw_text); + drawPred(detections.classIds.at(i), detections.confidences.at(i), + box, cv_image, detections.objectIds.at(i), stroke_rgba, stroke_alpha, stroke_width, false, draw_text); + + + // Get the Detected Object's child clip + if (trackedObject->ChildClipId() != ""){ + // Cast the parent timeline of this effect + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + if (parentTimeline){ + // Get the Tracked Object's child 
clip + Clip* childClip = parentTimeline->GetClip(trackedObject->ChildClipId()); + if (childClip){ + std::shared_ptr f(new Frame(1, frame->GetWidth(), frame->GetHeight(), "#00000000")); + // Get the image of the child clip for this frame + std::shared_ptr childClipFrame = childClip->GetFrame(f, frame_number); + childClipImages.push_back(childClipFrame->GetImage()); + + // Set the Qt rectangle with the bounding-box properties + QRectF boxRect; + boxRect.setRect((int)((trackedBox.cx-trackedBox.width/2)*fw), + (int)((trackedBox.cy - trackedBox.height/2)*fh), + (int)(trackedBox.width*fw), + (int)(trackedBox.height*fh)); + boxRects.push_back(boxRect); + } + } + } + } } } - // Set image with drawn box to frame - // If the input image is NULL or doesn't have tracking data, it's returned as it came - frame->SetImageCV(cv_image); + // Update Qt image with new Opencv frame + frame->SetImageCV(cv_image); + + // Set the bounding-box image with the Tracked Object's child clip image + if(boxRects.size() > 0){ + // Get the frame image + QImage frameImage = *(frame->GetImage()); + for(int i; i < boxRects.size();i++){ + // Set a Qt painter to the frame image + QPainter painter(&frameImage); + // Draw the child clip image inside the bounding-box + painter.drawImage(boxRects[i], *childClipImages[i], QRectF(0, 0, frameImage.size().width(), frameImage.size().height())); + } + // Set the frame image as the composed image + frame->AddImage(std::make_shared(frameImage)); + } - return frame; + return frame; } -void ObjectDetection::drawPred(int classId, float conf, cv::Rect2d box, cv::Mat& frame) -{ - - //Draw a rectangle displaying the bounding box - cv::rectangle(frame, box, classesColor[classId], 2); +void ObjectDetection::DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, + int thickness, bool is_background){ + // Get the bouding box vertices + cv::Point2f vertices2f[4]; + box.points(vertices2f); + + // TODO: take a rectangle of frame_image by 
refencence and draw on top of that to improve speed + // select min enclosing rectangle to draw on a small portion of the image + // cv::Rect rect = box.boundingRect(); + // cv::Mat image = frame_image(rect) + + if(is_background){ + cv::Mat overlayFrame; + frame_image.copyTo(overlayFrame); + + // draw bounding box background + cv::Point vertices[4]; + for(int i = 0; i < 4; ++i){ + vertices[i] = vertices2f[i];} + + cv::Rect rect = box.boundingRect(); + cv::fillConvexPoly(overlayFrame, vertices, 4, cv::Scalar(color[2],color[1],color[0]), cv::LINE_AA); + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame_image, alpha, 0, frame_image); + } + else{ + cv::Mat overlayFrame; + frame_image.copyTo(overlayFrame); + + // Draw bounding box + for (int i = 0; i < 4; i++) + { + cv::line(overlayFrame, vertices2f[i], vertices2f[(i+1)%4], cv::Scalar(color[2],color[1],color[0]), + thickness, cv::LINE_AA); + } - //Get the label for the class name and its confidence - std::string label = cv::format("%.2f", conf); - if (!classNames.empty()) - { - CV_Assert(classId < (int)classNames.size()); - label = classNames[classId] + ":" + label; + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame_image, alpha, 0, frame_image); } +} - //Display the label at the top of the bounding box - int baseLine; - cv::Size labelSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine); +void ObjectDetection::drawPred(int classId, float conf, cv::Rect2d box, cv::Mat& frame, int objectNumber, std::vector color, + float alpha, int thickness, bool is_background, bool display_text) +{ - double left = box.x; - double top = std::max((int)box.y, labelSize.height); + if(is_background){ + cv::Mat overlayFrame; + frame.copyTo(overlayFrame); - cv::rectangle(frame, cv::Point(left, top - round(1.025*labelSize.height)), cv::Point(left + round(1.025*labelSize.width), top + baseLine), classesColor[classId], cv::FILLED); - putText(frame, label, cv::Point(left+1, top), 
cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0,0,0),1); + //Draw a rectangle displaying the bounding box + cv::rectangle(overlayFrame, box, cv::Scalar(color[2],color[1],color[0]), cv::FILLED); + + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame, alpha, 0, frame); + } + else{ + cv::Mat overlayFrame; + frame.copyTo(overlayFrame); + + //Draw a rectangle displaying the bounding box + cv::rectangle(overlayFrame, box, cv::Scalar(color[2],color[1],color[0]), thickness); + + if(display_text){ + //Get the label for the class name and its confidence + std::string label = cv::format("%.2f", conf); + if (!classNames.empty()) + { + CV_Assert(classId < (int)classNames.size()); + label = classNames[classId] + ":" + label; + } + + //Display the label at the top of the bounding box + int baseLine; + cv::Size labelSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine); + + double left = box.x; + double top = std::max((int)box.y, labelSize.height); + + cv::rectangle(overlayFrame, cv::Point(left, top - round(1.025*labelSize.height)), cv::Point(left + round(1.025*labelSize.width), top + baseLine), + cv::Scalar(color[2],color[1],color[0]), cv::FILLED); + putText(overlayFrame, label, cv::Point(left+1, top), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0,0,0),1); + } + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame, alpha, 0, frame); + } } // Load protobuf data file @@ -136,29 +298,30 @@ bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath){ // Create tracker message pb_objdetect::ObjDetect objMessage; - { - // Read the existing tracker message. - std::fstream input(inputFilePath, std::ios::in | std::ios::binary); - if (!objMessage.ParseFromIstream(&input)) { - std::cerr << "Failed to parse protobuf message." << std::endl; - return false; - } + // Read the existing tracker message. 
+ std::fstream input(inputFilePath, std::ios::in | std::ios::binary); + if (!objMessage.ParseFromIstream(&input)) { + std::cerr << "Failed to parse protobuf message." << std::endl; + return false; } - - // Make sure classNames and detectionsData are empty + + // Make sure classNames, detectionsData and trackedObjects are empty classNames.clear(); detectionsData.clear(); + trackedObjects.clear(); // Seed to generate same random numbers std::srand(1); // Get all classes names and assign a color to them - for(int i = 0; i < objMessage.classnames_size(); i++){ + for(int i = 0; i < objMessage.classnames_size(); i++) + { classNames.push_back(objMessage.classnames(i)); classesColor.push_back(cv::Scalar(std::rand()%205 + 50, std::rand()%205 + 50, std::rand()%205 + 50)); } // Iterate over all frames of the saved message - for (size_t i = 0; i < objMessage.frame_size(); i++) { + for (size_t i = 0; i < objMessage.frame_size(); i++) + { // Create protobuf message reader const pb_objdetect::Frame& pbFrameData = objMessage.frame(i); @@ -172,8 +335,11 @@ bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath){ std::vector classIds; std::vector confidences; std::vector> boxes; + std::vector objectIds; - for(int i = 0; i < pbFrameData.bounding_box_size(); i++){ + // Iterate through the detected objects + for(int i = 0; i < pbFrameData.bounding_box_size(); i++) + { // Get bounding box coordinates float x = pBox.Get(i).x(); float y = pBox.Get(i).y(); @@ -183,6 +349,33 @@ bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath){ int classId = pBox.Get(i).classid(); // Get prediction confidence float confidence = pBox.Get(i).confidence(); + + // Get the object Id + int objectId = pBox.Get(i).objectid(); + + // Search for the object id on trackedObjects map + auto trackedObject = trackedObjects.find(objectId); + // Check if object already exists on the map + if (trackedObject != trackedObjects.end()) + { + // Add a new BBox to it + 
trackedObject->second->AddBox(id, x+(w/2), y+(h/2), w, h, 0.0); + } + else + { + // There is no tracked object with that id, so insert a new one + TrackedObjectBBox trackedObj((int)classesColor[classId](0), (int)classesColor[classId](1), (int)classesColor[classId](2), (int)0); + trackedObj.AddBox(id, x+(w/2), y+(h/2), w, h, 0.0); + + std::shared_ptr trackedObjPtr = std::make_shared(trackedObj); + ClipBase* parentClip = this->ParentClip(); + trackedObjPtr->ParentClip(parentClip); + + // Create a temp ID. This ID is necessary to initialize the object_id Json list + // this Id will be replaced by the one created in the UI + trackedObjPtr->Id(std::to_string(objectId)); + trackedObjects.insert({objectId, trackedObjPtr}); + } // Create OpenCV rectangle with the bouding box info cv::Rect_ box(x, y, w, h); @@ -191,10 +384,11 @@ bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath){ boxes.push_back(box); classIds.push_back(classId); confidences.push_back(confidence); + objectIds.push_back(objectId); } // Assign data to object detector map - detectionsData[id] = DetectionData(classIds, confidences, boxes, id); + detectionsData[id] = DetectionData(classIds, confidences, boxes, id, objectIds); } // Delete all global objects allocated by libprotobuf. 
@@ -203,82 +397,187 @@ bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath){ return true; } -// Get tracker info for the desired frame -DetectionData ObjectDetection::GetTrackedData(size_t frameId){ +// Get the indexes and IDs of all visible objects in the given frame +std::string ObjectDetection::GetVisibleObjects(int64_t frame_number) const{ + + // Initialize the JSON objects + Json::Value root; + root["visible_objects_index"] = Json::Value(Json::arrayValue); + root["visible_objects_id"] = Json::Value(Json::arrayValue); + + // Check if track data exists for the requested frame + if (detectionsData.find(frame_number) == detectionsData.end()){ + return root.toStyledString(); + } + DetectionData detections = detectionsData.at(frame_number); + + // Iterate through the tracked objects + for(int i = 0; i 0 && + std::find(display_classes.begin(), display_classes.end(), classNames[detections.classIds.at(i)]) == display_classes.end()){ + continue; + } - // Check if the tracker info for the requested frame exists - if ( detectionsData.find(frameId) == detectionsData.end() ) { - return DetectionData(); - } else { - return detectionsData[frameId]; + int objectId = detections.objectIds.at(i); + // Search for the object in the trackedObjects map + auto trackedObject = trackedObjects.find(objectId); + + // Get the tracked object JSON properties for this frame + Json::Value trackedObjectJSON = trackedObject->second->PropertiesJSON(frame_number); + + if (trackedObjectJSON["visible"]["value"].asBool() && + trackedObject->second->ExactlyContains(frame_number)){ + // Save the object's index and ID if it's visible in this frame + root["visible_objects_index"].append(trackedObject->first); + root["visible_objects_id"].append(trackedObject->second->Id()); + } } + return root.toStyledString(); } // Generate JSON string of this object std::string ObjectDetection::Json() const { - // Return formatted string - return JsonValue().toStyledString(); + // Return formatted string 
+ return JsonValue().toStyledString(); } // Generate Json::Value for this object Json::Value ObjectDetection::JsonValue() const { - // Create root json object - Json::Value root = EffectBase::JsonValue(); // get parent properties - root["type"] = info.class_name; - root["protobuf_data_path"] = protobuf_data_path; + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["protobuf_data_path"] = protobuf_data_path; + root["selected_object_index"] = selectedObjectIndex; + root["confidence_threshold"] = confidence_threshold; + root["display_box_text"] = display_box_text.JsonValue(); + + // Add tracked object's IDs to root + Json::Value objects; + for (auto const& trackedObject : trackedObjects){ + Json::Value trackedObjectJSON = trackedObject.second->JsonValue(); + // add object json + objects[trackedObject.second->Id()] = trackedObjectJSON; + } + root["objects"] = objects; - // return JsonValue - return root; + // return JsonValue + return root; } // Load JSON string into this object void ObjectDetection::SetJson(const std::string value) { - // Parse JSON string into JSON objects - try - { - const Json::Value root = openshot::stringToJson(value); - // Set all values that match - SetJsonValue(root); - } - catch (const std::exception& e) - { - // Error parsing JSON (or missing keys) - throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); - } + // Parse JSON string into JSON objects + try + { + const Json::Value root = openshot::stringToJson(value); + // Set all values that match + SetJsonValue(root); + } + catch (const std::exception& e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); + } } // Load Json::Value into this object void ObjectDetection::SetJsonValue(const Json::Value root) { + // Set parent data + EffectBase::SetJsonValue(root); - // Set parent data - EffectBase::SetJsonValue(root); 
- // Set data from Json (if key is found) - if (!root["protobuf_data_path"].isNull()){ - protobuf_data_path = (root["protobuf_data_path"].asString()); - - if(!LoadObjDetectdData(protobuf_data_path)){ - std::cout<<"Invalid protobuf data path"; - protobuf_data_path = ""; - } - } + // Set data from Json (if key is found) + if (!root["protobuf_data_path"].isNull() && protobuf_data_path.size() <= 1){ + protobuf_data_path = root["protobuf_data_path"].asString(); + + if(!LoadObjDetectdData(protobuf_data_path)){ + throw InvalidFile("Invalid protobuf data path", ""); + protobuf_data_path = ""; + } + } + + // Set the selected object index + if (!root["selected_object_index"].isNull()) + selectedObjectIndex = root["selected_object_index"].asInt(); + + if (!root["confidence_threshold"].isNull()) + confidence_threshold = root["confidence_threshold"].asFloat(); + + if (!root["display_box_text"].isNull()) + display_box_text.SetJsonValue(root["display_box_text"]); + + if (!root["class_filter"].isNull()){ + class_filter = root["class_filter"].asString(); + std::stringstream ss(class_filter); + display_classes.clear(); + while( ss.good() ) + { + // Parse comma separated string + std::string substr; + std::getline( ss, substr, ',' ); + display_classes.push_back( substr ); + } + } + + if (!root["objects"].isNull()){ + for (auto const& trackedObject : trackedObjects){ + std::string obj_id = std::to_string(trackedObject.first); + if(!root["objects"][obj_id].isNull()){ + trackedObject.second->SetJsonValue(root["objects"][obj_id]); + } + } + } + + // Set the tracked object's ids + if (!root["objects_id"].isNull()){ + for (auto const& trackedObject : trackedObjects){ + Json::Value trackedObjectJSON; + trackedObjectJSON["box_id"] = root["objects_id"][trackedObject.first].asString(); + trackedObject.second->SetJsonValue(trackedObjectJSON); + } + } } // Get all properties for a specific frame std::string ObjectDetection::PropertiesJSON(int64_t requested_frame) const { - // Generate JSON 
properties list - Json::Value root; - root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); - root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); - root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); - root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); - root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); - root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); - - // Return formatted string - return root.toStyledString(); + // Generate JSON properties list + Json::Value root; + + Json::Value objects; + if(trackedObjects.count(selectedObjectIndex) != 0){ + auto selectedObject = trackedObjects.at(selectedObjectIndex); + if (selectedObject){ + Json::Value trackedObjectJSON = selectedObject->PropertiesJSON(requested_frame); + // add object json + objects[selectedObject->Id()] = trackedObjectJSON; + } + } + root["objects"] = objects; + + root["selected_object_index"] = add_property_json("Selected Object", selectedObjectIndex, "int", "", NULL, 0, 200, false, requested_frame); + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, 
requested_frame); + root["confidence_threshold"] = add_property_json("Confidence Theshold", confidence_threshold, "float", "", NULL, 0, 1, false, requested_frame); + root["class_filter"] = add_property_json("Class Filter", 0.0, "string", class_filter, NULL, -1, -1, false, requested_frame); + + root["display_box_text"] = add_property_json("Draw Box Text", display_box_text.GetValue(requested_frame), "int", "", &display_box_text, 0, 1.0, false, requested_frame); + root["display_box_text"]["choices"].append(add_property_choice_json("Off", 1, display_box_text.GetValue(requested_frame))); + root["display_box_text"]["choices"].append(add_property_choice_json("On", 0, display_box_text.GetValue(requested_frame))); + + // Return formatted string + return root.toStyledString(); } diff --git a/src/effects/ObjectDetection.h b/src/effects/ObjectDetection.h index 0dbb8cd6c..50e393080 100644 --- a/src/effects/ObjectDetection.h +++ b/src/effects/ObjectDetection.h @@ -2,6 +2,7 @@ * @file * @brief Header file for Object Detection effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -45,16 +46,24 @@ // Struct that stores the detected bounding boxes for all the clip frames struct DetectionData{ DetectionData(){} - DetectionData(std::vector _classIds, std::vector _confidences, std::vector> _boxes, size_t _frameId){ + DetectionData( + std::vector _classIds, + std::vector _confidences, + std::vector> _boxes, + size_t _frameId, + std::vector _objectIds) + { classIds = _classIds; confidences = _confidences; boxes = _boxes; frameId = _frameId; + objectIds = _objectIds; } size_t frameId; std::vector classIds; std::vector confidences; std::vector> boxes; + std::vector objectIds; }; namespace openshot @@ -70,18 +79,33 @@ namespace openshot std::vector classNames; std::vector classesColor; + + /// Draw class name and confidence score on top of the bounding box + Keyframe display_box_text; + /// Minimum confidence value to display the detected objects + float 
confidence_threshold = 0.5; + /// Contain the user selected classes for visualization + std::vector display_classes; + std::string class_filter; /// Init effect settings void init_effect_details(); + /// Draw bounding box with class and score text + void drawPred(int classId, float conf, cv::Rect2d box, cv::Mat& frame, int objectNumber, std::vector color, float alpha, + int thickness, bool is_background, bool draw_text); + /// Draw rotated rectangle with alpha channel + void DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background); - void drawPred(int classId, float conf, cv::Rect2d box, cv::Mat& frame); public: - - ObjectDetection(); + /// Index of the Tracked Object that was selected to modify it's properties + int selectedObjectIndex; ObjectDetection(std::string clipTrackerDataPath); + /// Default constructor + ObjectDetection(); + /// @brief This method is required for all derived classes of EffectBase, and returns a /// modified openshot::Frame object /// @@ -98,7 +122,8 @@ namespace openshot /// Load protobuf data file bool LoadObjDetectdData(std::string inputFilePath); - DetectionData GetTrackedData(size_t frameId); + /// Get the indexes and IDs of all visible objects in the given frame + std::string GetVisibleObjects(int64_t frame_number) const override; // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object diff --git a/src/effects/Pixelate.cpp b/src/effects/Pixelate.cpp index 6d1962bbb..a68097922 100644 --- a/src/effects/Pixelate.cpp +++ b/src/effects/Pixelate.cpp @@ -184,6 +184,9 @@ std::string Pixelate::PropertiesJSON(int64_t requested_frame) const { root["right"] = add_property_json("Right Margin", right.GetValue(requested_frame), "float", "", &right, 0.0, 1.0, false, requested_frame); root["bottom"] = add_property_json("Bottom Margin", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 1.0, false, requested_frame); + // Set 
the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Saturation.cpp b/src/effects/Saturation.cpp index 119c30651..4b9f12410 100644 --- a/src/effects/Saturation.cpp +++ b/src/effects/Saturation.cpp @@ -235,6 +235,9 @@ std::string Saturation::PropertiesJSON(int64_t requested_frame) const { root["saturation_G"] = add_property_json("Saturation (Green)", saturation_G.GetValue(requested_frame), "float", "", &saturation_G, 0.0, 4.0, false, requested_frame); root["saturation_B"] = add_property_json("Saturation (Blue)", saturation_B.GetValue(requested_frame), "float", "", &saturation_B, 0.0, 4.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Shift.cpp b/src/effects/Shift.cpp index 38b5be584..0a9c496d5 100644 --- a/src/effects/Shift.cpp +++ b/src/effects/Shift.cpp @@ -199,6 +199,9 @@ std::string Shift::PropertiesJSON(int64_t requested_frame) const { root["x"] = add_property_json("X Shift", x.GetValue(requested_frame), "float", "", &x, -1, 1, false, requested_frame); root["y"] = add_property_json("Y Shift", y.GetValue(requested_frame), "float", "", &y, -1, 1, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Stabilizer.cpp b/src/effects/Stabilizer.cpp index 83472b30b..576ab8834 100644 --- a/src/effects/Stabilizer.cpp +++ 
b/src/effects/Stabilizer.cpp @@ -2,6 +2,7 @@ * @file * @brief Source file for Stabilizer effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -238,6 +239,9 @@ std::string Stabilizer::PropertiesJSON(int64_t requested_frame) const { root["zoom"] = add_property_json("Zoom", zoom.GetValue(requested_frame), "float", "", &zoom, 0.0, 2.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/effects/Stabilizer.h b/src/effects/Stabilizer.h index b7ce0e5c7..1291fb73f 100644 --- a/src/effects/Stabilizer.h +++ b/src/effects/Stabilizer.h @@ -2,6 +2,7 @@ * @file * @brief Header file for Stabilizer effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -76,7 +77,7 @@ namespace openshot { /** - * @brief This class stabilizes video clip to remove undesired shaking and jitter. + * @brief This class stabilizes a video clip to remove undesired shaking and jitter. * * Adding stabilization is useful to increase video quality overall, since it removes * from subtle to harsh unexpected camera movements. 
diff --git a/src/effects/Tracker.cpp b/src/effects/Tracker.cpp index 26be9d168..01537f771 100644 --- a/src/effects/Tracker.cpp +++ b/src/effects/Tracker.cpp @@ -2,6 +2,7 @@ * @file * @brief Source file for Tracker effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -35,9 +36,14 @@ #include "effects/Tracker.h" #include "Exceptions.h" +#include "Timeline.h" #include +#include +#include +#include + using namespace std; using namespace openshot; using google::protobuf::util::TimeUtil; @@ -47,9 +53,16 @@ Tracker::Tracker(std::string clipTrackerDataPath) { // Init effect properties init_effect_details(); - - // Tries to load the tracker data from protobuf - LoadTrackedData(clipTrackerDataPath); + // Instantiate a TrackedObjectBBox object and point to it + TrackedObjectBBox trackedDataObject; + trackedData = std::make_shared(trackedDataObject); + // Tries to load the tracked object's data from protobuf file + trackedData->LoadBoxData(clipTrackerDataPath); + ClipBase* parentClip = this->ParentClip(); + trackedData->ParentClip(parentClip); + trackedData->Id(std::to_string(0)); + // Insert TrackedObject with index 0 to the trackedObjects map + trackedObjects.insert({0, trackedData}); } // Default constructor @@ -57,9 +70,17 @@ Tracker::Tracker() { // Init effect properties init_effect_details(); - + // Instantiate a TrackedObjectBBox object and point to it + TrackedObjectBBox trackedDataObject; + trackedData = std::make_shared(trackedDataObject); + ClipBase* parentClip = this->ParentClip(); + trackedData->ParentClip(parentClip); + trackedData->Id(std::to_string(0)); + // Insert TrackedObject with index 0 to the trackedObjects map + trackedObjects.insert({0, trackedData}); } + // Init effect settings void Tracker::init_effect_details() { @@ -72,96 +93,159 @@ void Tracker::init_effect_details() info.description = "Track the selected bounding box through the video."; info.has_audio = false; info.has_video = true; + info.has_tracked_object = 
true; + + this->TimeScale = 1.0; } // This method is required for all derived classes of EffectBase, and returns a // modified openshot::Frame object std::shared_ptr Tracker::GetFrame(std::shared_ptr frame, int64_t frame_number) { - // Get the frame's image cv::Mat frame_image = frame->GetImageCV(); + // Initialize the Qt rectangle that will hold the positions of the bounding-box + QRectF boxRect; + // Initialize the image of the TrackedObject child clip + std::shared_ptr childClipImage = nullptr; + // Check if frame isn't NULL - if(!frame_image.empty()){ + if(!frame_image.empty() && + trackedData->Contains(frame_number) && + trackedData->visible.GetValue(frame_number) == 1) + { + // Get the width and height of the image + float fw = frame_image.size().width; + float fh = frame_image.size().height; + + // Get the bounding-box of given frame + BBox fd = trackedData->GetBox(frame_number); // Check if track data exists for the requested frame - if (trackedDataById.find(frame_number) != trackedDataById.end()) { - - float fw = frame_image.size().width; - float fh = frame_image.size().height; - - // Draw box on image - EffectFrameData fd = trackedDataById[frame_number]; - cv::Rect2d box((int)(fd.x1*fw), - (int)(fd.y1*fh), - (int)((fd.x2-fd.x1)*fw), - (int)((fd.y2-fd.y1)*fh)); - cv::rectangle(frame_image, box, cv::Scalar( 255, 0, 0 ), 2, 1 ); - } + if (trackedData->draw_box.GetValue(frame_number) == 1) + { + std::vector stroke_rgba = trackedData->stroke.GetColorRGBA(frame_number); + int stroke_width = trackedData->stroke_width.GetValue(frame_number); + float stroke_alpha = trackedData->stroke_alpha.GetValue(frame_number); + std::vector bg_rgba = trackedData->background.GetColorRGBA(frame_number); + float bg_alpha = trackedData->background_alpha.GetValue(frame_number); + + // Create a rotated rectangle object that holds the bounding box + cv::RotatedRect box ( cv::Point2f( (int)(fd.cx*fw), (int)(fd.cy*fh) ), + cv::Size2f( (int)(fd.width*fw), (int)(fd.height*fh) ), + (int) 
(fd.angle) ); + + DrawRectangleRGBA(frame_image, box, bg_rgba, bg_alpha, 1, true); + DrawRectangleRGBA(frame_image, box, stroke_rgba, stroke_alpha, stroke_width, false); + } + + // Get the image of the Tracked Object' child clip + if (trackedData->ChildClipId() != ""){ + // Cast the parent timeline of this effect + Timeline* parentTimeline = (Timeline *) ParentTimeline(); + if (parentTimeline){ + // Get the Tracked Object's child clip + Clip* childClip = parentTimeline->GetClip(trackedData->ChildClipId()); + if (childClip){ + // Get the image of the child clip for this frame + std::shared_ptr f(new Frame(1, frame->GetWidth(), frame->GetHeight(), "#00000000")); + std::shared_ptr childClipFrame = childClip->GetFrame(f, frame_number); + childClipImage = childClipFrame->GetImage(); + + // Set the Qt rectangle with the bounding-box properties + boxRect.setRect((int)((fd.cx-fd.width/2)*fw), + (int)((fd.cy - fd.height/2)*fh), + (int)(fd.width*fw), + (int)(fd.height*fh) ); + } + } + } + } // Set image with drawn box to frame // If the input image is NULL or doesn't have tracking data, it's returned as it came frame->SetImageCV(frame_image); - return frame; -} - -// Load protobuf data file -bool Tracker::LoadTrackedData(std::string inputFilePath){ - using std::ios; - - // Create tracker message - pb_tracker::Tracker trackerMessage; - - { - // Read the existing tracker message. - std::fstream input(inputFilePath, ios::in | ios::binary); - if (!trackerMessage.ParseFromIstream(&input)) { - std::cerr << "Failed to parse protobuf message." 
<< std::endl; - return false; - } - } + // Set the bounding-box image with the Tracked Object's child clip image + if (childClipImage){ + // Get the frame image + QImage frameImage = *(frame->GetImage()); - // Make sure the trackedData is empty - trackedDataById.clear(); + // Set a Qt painter to the frame image + QPainter painter(&frameImage); - // Iterate over all frames of the saved message - for (size_t i = 0; i < trackerMessage.frame_size(); i++) { - const pb_tracker::Frame& pbFrameData = trackerMessage.frame(i); + // Draw the child clip image inside the bounding-box + painter.drawImage(boxRect, *childClipImage, QRectF(0, 0, frameImage.size().width(), frameImage.size().height())); - // Load frame and rotation data - size_t id = pbFrameData.id(); - float rotation = pbFrameData.rotation(); - - // Load bounding box data - const pb_tracker::Frame::Box& box = pbFrameData.bounding_box(); - float x1 = box.x1(); - float y1 = box.y1(); - float x2 = box.x2(); - float y2 = box.y2(); + // Set the frame image as the composed image + frame->AddImage(std::make_shared(frameImage)); + } - // Assign data to tracker map - trackedDataById[id] = EffectFrameData(id, rotation, x1, y1, x2, y2); - } + return frame; +} - // Delete all global objects allocated by libprotobuf. 
- google::protobuf::ShutdownProtobufLibrary(); +void Tracker::DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background){ + // Get the bouding box vertices + cv::Point2f vertices2f[4]; + box.points(vertices2f); + + // TODO: take a rectangle of frame_image by refencence and draw on top of that to improve speed + // select min enclosing rectangle to draw on a small portion of the image + // cv::Rect rect = box.boundingRect(); + // cv::Mat image = frame_image(rect) + + if(is_background){ + cv::Mat overlayFrame; + frame_image.copyTo(overlayFrame); + + // draw bounding box background + cv::Point vertices[4]; + for(int i = 0; i < 4; ++i){ + vertices[i] = vertices2f[i];} + + cv::Rect rect = box.boundingRect(); + cv::fillConvexPoly(overlayFrame, vertices, 4, cv::Scalar(color[2],color[1],color[0]), cv::LINE_AA); + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame_image, alpha, 0, frame_image); + } + else{ + cv::Mat overlayFrame; + frame_image.copyTo(overlayFrame); + + // Draw bounding box + for (int i = 0; i < 4; i++) + { + cv::line(overlayFrame, vertices2f[i], vertices2f[(i+1)%4], cv::Scalar(color[2],color[1],color[0]), + thickness, cv::LINE_AA); + } - return true; + // add opacity + cv::addWeighted(overlayFrame, 1-alpha, frame_image, alpha, 0, frame_image); + } } -// Get tracker info for the desired frame -EffectFrameData Tracker::GetTrackedData(size_t frameId){ - - // Check if the tracker info for the requested frame exists - if ( trackedDataById.find(frameId) == trackedDataById.end() ) { - return EffectFrameData(); - } else { - return trackedDataById[frameId]; +// Get the indexes and IDs of all visible objects in the given frame +std::string Tracker::GetVisibleObjects(int64_t frame_number) const{ + + // Initialize the JSON objects + Json::Value root; + root["visible_objects_index"] = Json::Value(Json::arrayValue); + root["visible_objects_id"] = Json::Value(Json::arrayValue); + + // Iterate 
through the tracked objects + for (const auto& trackedObject : trackedObjects){ + // Get the tracked object JSON properties for this frame + Json::Value trackedObjectJSON = trackedObject.second->PropertiesJSON(frame_number); + if (trackedObjectJSON["visible"]["value"].asBool()){ + // Save the object's index and ID if it's visible in this frame + root["visible_objects_index"].append(trackedObject.first); + root["visible_objects_id"].append(trackedObject.second->Id()); + } } + return root.toStyledString(); } // Generate JSON string of this object @@ -176,8 +260,22 @@ Json::Value Tracker::JsonValue() const { // Create root json object Json::Value root = EffectBase::JsonValue(); // get parent properties + + // Save the effect's properties on root root["type"] = info.class_name; root["protobuf_data_path"] = protobuf_data_path; + root["BaseFPS"]["num"] = BaseFPS.num; + root["BaseFPS"]["den"] = BaseFPS.den; + root["TimeScale"] = this->TimeScale; + + // Add trackedObjects IDs to JSON + Json::Value objects; + for (auto const& trackedObject : trackedObjects){ + Json::Value trackedObjectJSON = trackedObject.second->JsonValue(); + // add object json + objects[trackedObject.second->Id()] = trackedObjectJSON; + } + root["objects"] = objects; // return JsonValue return root; @@ -198,6 +296,7 @@ void Tracker::SetJson(const std::string value) { // Error parsing JSON (or missing keys) throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); } + return; } // Load Json::Value into this object @@ -205,22 +304,73 @@ void Tracker::SetJsonValue(const Json::Value root) { // Set parent data EffectBase::SetJsonValue(root); - // Set data from Json (if key is found) - if (!root["protobuf_data_path"].isNull()){ - protobuf_data_path = (root["protobuf_data_path"].asString()); - if(!LoadTrackedData(protobuf_data_path)){ - std::cerr << "Invalid protobuf data path\n"; + if(!root["type"].isNull()) + info.class_name = root["type"].asString(); + + if (!root["BaseFPS"].isNull() && 
root["BaseFPS"].isObject()) + { + if (!root["BaseFPS"]["num"].isNull()) + { + BaseFPS.num = (int) root["BaseFPS"]["num"].asInt(); + } + if (!root["BaseFPS"]["den"].isNull()) + { + BaseFPS.den = (int) root["BaseFPS"]["den"].asInt(); + } + } + + if (!root["TimeScale"].isNull()) + TimeScale = (double) root["TimeScale"].asDouble(); + + // Set data from Json (if key is found) + if (!root["protobuf_data_path"].isNull() && protobuf_data_path.size() <= 1) + { + protobuf_data_path = root["protobuf_data_path"].asString(); + if(!trackedData->LoadBoxData(protobuf_data_path)) + { + std::clog << "Invalid protobuf data path " << protobuf_data_path << '\n'; protobuf_data_path = ""; } } + + if (!root["objects"].isNull()){ + for (auto const& trackedObject : trackedObjects){ + std::string obj_id = std::to_string(trackedObject.first); + if(!root["objects"][obj_id].isNull()){ + trackedObject.second->SetJsonValue(root["objects"][obj_id]); + } + } + } + + // Set the tracked object's ids + if (!root["objects_id"].isNull()){ + for (auto const& trackedObject : trackedObjects){ + Json::Value trackedObjectJSON; + trackedObjectJSON["box_id"] = root["objects_id"][trackedObject.first].asString(); + trackedObject.second->SetJsonValue(trackedObjectJSON); + } + } + + return; } // Get all properties for a specific frame std::string Tracker::PropertiesJSON(int64_t requested_frame) const { - + // Generate JSON properties list Json::Value root; + + // Add trackedObject properties to JSON + Json::Value objects; + for (auto const& trackedObject : trackedObjects){ + Json::Value trackedObjectJSON = trackedObject.second->PropertiesJSON(requested_frame); + // add object json + objects[trackedObject.second->Id()] = trackedObjectJSON; + } + root["objects"] = objects; + + // Append effect's properties root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, 
requested_frame); root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); diff --git a/src/effects/Tracker.h b/src/effects/Tracker.h index e1f4cab47..645e1d52e 100644 --- a/src/effects/Tracker.h +++ b/src/effects/Tracker.h @@ -2,6 +2,7 @@ * @file * @brief Header file for Tracker effect class * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -41,55 +42,33 @@ #include "../KeyFrame.h" #include "protobuf_messages/trackerdata.pb.h" - -// Tracking info struct -struct EffectFrameData{ - size_t frame_id = -1; - float rotation = 0; - float x1 = -1; - float y1 = -1; - float x2 = -1; - float y2 = -1; - - // Constructors - EffectFrameData() - {} - - EffectFrameData( int _frame_id) - {frame_id = _frame_id;} - - EffectFrameData( int _frame_id , float _rotation, float _x1, float _y1, float _x2, float _y2) - { - frame_id = _frame_id; - rotation = _rotation; - x1 = _x1; - y1 = _y1; - x2 = _x2; - y2 = _y2; - } -}; - +#include "../TrackedObjectBBox.h" +#include "../Clip.h" namespace openshot { /** - * @brief This class track a given object through the clip and, when called, draws a box surrounding it. + * @brief This class tracks a given object through the clip, draws a box around it and allow + * the user to attach another clip (image or video) to the tracked object. * - * Tracking is useful to better visualize and follow the movement of an object through video. + * Tracking is useful to better visualize, follow the movement of an object through video + * and attach an image or video to it. 
*/ class Tracker : public EffectBase { private: /// Init effect settings void init_effect_details(); - std::string protobuf_data_path; - public: + Fraction BaseFPS; + double TimeScale; - std::map trackedDataById; // Save object tracking box data + public: + std::string protobuf_data_path; ///< Path to the protobuf file that holds the bounding-box data + std::shared_ptr trackedData; ///< Pointer to an object that holds the bounding-box data and it's Keyframes - /// Blank constructor, useful when using Json to load the effect properties - Tracker(std::string clipTrackerDataPath); + /// Blank constructor, useful when using Json to load the effect properties + Tracker(std::string clipTrackerDataPath); /// Default constructor Tracker(); @@ -106,11 +85,10 @@ namespace openshot std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } - // Load protobuf data file - bool LoadTrackedData(std::string inputFilePath); + /// Get the indexes and IDs of all visible objects in the given frame + std::string GetVisibleObjects(int64_t frame_number) const override; - // Get tracker info for the desired frame - EffectFrameData GetTrackedData(size_t frameId); + void DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background); // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object diff --git a/src/effects/Wave.cpp b/src/effects/Wave.cpp index ff8a8230b..07e0372a5 100644 --- a/src/effects/Wave.cpp +++ b/src/effects/Wave.cpp @@ -187,6 +187,9 @@ std::string Wave::PropertiesJSON(int64_t requested_frame) const { root["shift_x"] = add_property_json("X Shift", shift_x.GetValue(requested_frame), "float", "", &shift_x, 0.0, 1000.0, false, requested_frame); root["speed_y"] = add_property_json("Vertical speed", 
speed_y.GetValue(requested_frame), "float", "", &speed_y, 0.0, 300.0, false, requested_frame); + // Set the parent effect which properties this effect will inherit + root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/src/protobuf_messages/objdetectdata.proto b/src/protobuf_messages/objdetectdata.proto index 49ad94afa..272ffb4f6 100644 --- a/src/protobuf_messages/objdetectdata.proto +++ b/src/protobuf_messages/objdetectdata.proto @@ -17,6 +17,7 @@ message Frame { float h = 4; int32 classId = 5; float confidence = 6; + int32 objectId = 7; } repeated Box bounding_box = 2; diff --git a/src/sort_filter/KalmanTracker.cpp b/src/sort_filter/KalmanTracker.cpp index 019239629..9e386202e 100644 --- a/src/sort_filter/KalmanTracker.cpp +++ b/src/sort_filter/KalmanTracker.cpp @@ -7,8 +7,6 @@ using namespace std; using namespace cv; -int KalmanTracker::kf_count = 0; - // initialize Kalman filter void KalmanTracker::init_kf( StateType stateMat) diff --git a/src/sort_filter/KalmanTracker.h b/src/sort_filter/KalmanTracker.h index f08683df0..65e186c04 100644 --- a/src/sort_filter/KalmanTracker.h +++ b/src/sort_filter/KalmanTracker.h @@ -21,18 +21,16 @@ class KalmanTracker m_hits = 0; m_hit_streak = 0; m_age = 0; - m_id = kf_count; - //kf_count++; + m_id = 0; } - KalmanTracker(StateType initRect, float confidence, int classId) : confidence(confidence), classId(classId) + KalmanTracker(StateType initRect, float confidence, int classId, int objectId) : confidence(confidence), classId(classId) { init_kf(initRect); m_time_since_update = 0; m_hits = 0; m_hit_streak = 0; m_age = 0; - m_id = kf_count; - kf_count++; + m_id = objectId; } ~KalmanTracker() @@ -47,8 +45,6 @@ class KalmanTracker StateType get_state(); StateType get_rect_xysr(float cx, float cy, float s, float r); - static int kf_count; - int m_time_since_update; int m_hits; int 
m_hit_streak; diff --git a/src/sort_filter/sort.cpp b/src/sort_filter/sort.cpp index b65d01a45..0958dcbad 100644 --- a/src/sort_filter/sort.cpp +++ b/src/sort_filter/sort.cpp @@ -54,7 +54,7 @@ void SortTracker::update(vector detections_cv, int frame_count, double tb.confidence = confidences[i]; detections.push_back(tb); - KalmanTracker trk = KalmanTracker(detections[i].box, detections[i].confidence, detections[i].classId); + KalmanTracker trk = KalmanTracker(detections[i].box, detections[i].confidence, detections[i].classId, i); trackers.push_back(trk); } return; @@ -167,7 +167,7 @@ void SortTracker::update(vector detections_cv, int frame_count, double // create and initialise new trackers for unmatched detections for (auto umd : unmatchedDetections) { - KalmanTracker tracker = KalmanTracker(detections[umd].box, detections[umd].confidence, detections[umd].classId); + KalmanTracker tracker = KalmanTracker(detections[umd].box, detections[umd].confidence, detections[umd].classId, umd); trackers.push_back(tracker); } diff --git a/src/sort_filter/sort.hpp b/src/sort_filter/sort.hpp index 9161ada90..ba9a2d7ad 100644 --- a/src/sort_filter/sort.hpp +++ b/src/sort_filter/sort.hpp @@ -39,7 +39,7 @@ class SortTracker double GetCentroidsDistance(cv::Rect_ bb_test, cv::Rect_ bb_gt); std::vector trackers; - double max_centroid_dist_norm = 0.15; + double max_centroid_dist_norm = 0.05; std::vector> predictedBoxes; std::vector> centroid_dist_matrix; diff --git a/tests/CVObjectDetection.cpp b/tests/CVObjectDetection.cpp index ce948074d..350f1b984 100644 --- a/tests/CVObjectDetection.cpp +++ b/tests/CVObjectDetection.cpp @@ -2,6 +2,7 @@ * @file * @brief Unit tests for CVObjectDetection * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -62,7 +63,7 @@ TEST_CASE( "DetectObject_Video", "[libopenshot][opencv][objectdetection]" ) //TODO remove hardcoded path CVObjectDetection objectDetector(effectInfo, processingController); - objectDetector.detectObjectsClip(c1, 
0, 20, true); + objectDetector.detectObjectsClip(c1, 1, 20, true); CVDetectionData dd = objectDetector.GetDetectionData(20); @@ -97,7 +98,7 @@ TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][objectdetection]" ) //TODO remove hardcoded path CVObjectDetection objectDetector_1(effectInfo ,processingController); - objectDetector_1.detectObjectsClip(c1, 0, 20, true); + objectDetector_1.detectObjectsClip(c1, 1, 20, true); CVDetectionData dd_1 = objectDetector_1.GetDetectionData(20); diff --git a/tests/CVStabilizer.cpp b/tests/CVStabilizer.cpp index ed3e5403c..15782a574 100644 --- a/tests/CVStabilizer.cpp +++ b/tests/CVStabilizer.cpp @@ -2,6 +2,7 @@ * @file * @brief Unit tests for CVStabilizer * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -62,7 +63,7 @@ TEST_CASE( "Stabilize_Video", "[libopenshot][opencv][stabilizer]" ) CVStabilization stabilizer(json_data, stabilizer_pc); // Stabilize clip for frames 0-21 - stabilizer.stabilizeClip(c1, 0, 21, true); + stabilizer.stabilizeClip(c1, 1, 21, true); // Get stabilized data TransformParam tp = stabilizer.GetTransformParamData(20); @@ -106,7 +107,7 @@ TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][stabilizer]" ) CVStabilization stabilizer_1(json_data, stabilizer_pc); // Stabilize clip for frames 0-20 - stabilizer_1.stabilizeClip(c1, 0, 20+1, true); + stabilizer_1.stabilizeClip(c1, 1, 20+1, true); // Get stabilized data TransformParam tp_1 = stabilizer_1.GetTransformParamData(20); diff --git a/tests/CVTracker.cpp b/tests/CVTracker.cpp index 9548fc380..bb4470654 100644 --- a/tests/CVTracker.cpp +++ b/tests/CVTracker.cpp @@ -2,6 +2,7 @@ * @file * @brief Unit tests for CVTracker * @author Jonathan Thomas + * @author Brenno Caldato * * @ref License */ @@ -56,26 +57,26 @@ TEST_CASE( "Track_Video", "[libopenshot][opencv][tracker]" ) { "protobuf_data_path": "kcf_tracker.data", "tracker-type": "KCF", - "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + "region": 
{"normalized_x": 0.459375, "normalized_y": 0.28333, "normalized_width": 0.28125, "normalized_height": 0.461111, "first-frame": 1} } )proto"; // Create tracker CVTracker kcfTracker(json_data, tracker_pc); // Track clip for frames 0-20 - kcfTracker.trackClip(c1, 0, 20, true); + kcfTracker.trackClip(c1, 1, 20, true); // Get tracked data FrameData fd = kcfTracker.GetTrackedData(20); - float x = fd.x1; - float y = fd.y1; - float width = fd.x2 - x; - float height = fd.y2 - y; + int x = (float)fd.x1 * 640; + int y = (float)fd.y1 * 360; + int width = ((float)fd.x2*640) - x; + int height = ((float)fd.y2*360) - y; // Compare if tracked data is equal to pre-tested ones - CHECK((int)(x * 640) == 259); - CHECK((int)(y * 360) == 131); - CHECK((int)(width * 640) == 180); - CHECK((int)(height * 360) == 166); + CHECK(x >= 255); CHECK(x <= 257); + CHECK(y >= 133); CHECK(y <= 135); + CHECK(width >= 179); CHECK(width <= 181); + CHECK(height >= 165); CHECK(height <= 168); } @@ -94,7 +95,7 @@ TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][tracker]" ) { "protobuf_data_path": "kcf_tracker.data", "tracker-type": "KCF", - "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 1} } )proto"; @@ -102,7 +103,7 @@ TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][tracker]" ) CVTracker kcfTracker_1(json_data, tracker_pc); // Track clip for frames 0-20 - kcfTracker_1.trackClip(c1, 0, 20, true); + kcfTracker_1.trackClip(c1, 1, 20, true); // Get tracked data FrameData fd_1 = kcfTracker_1.GetTrackedData(20); @@ -119,7 +120,7 @@ TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][tracker]" ) { "protobuf_data_path": "kcf_tracker.data", "tracker_type": "", - "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 0} + "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 1} } )proto"; // Create second tracker diff --git a/tests/KeyFrame.cpp 
b/tests/KeyFrame.cpp index feb4ccb7f..599c973f8 100644 --- a/tests/KeyFrame.cpp +++ b/tests/KeyFrame.cpp @@ -30,10 +30,19 @@ #include +#include +#include + #include "KeyFrame.h" #include "Exceptions.h" #include "Coordinate.h" #include "Fraction.h" +#include "Clip.h" +#include "Timeline.h" +#ifdef USE_OPENCV +#include "effects/Tracker.h" +#include "TrackedObjectBBox.h" +#endif #include "Point.h" using namespace openshot; @@ -506,3 +515,224 @@ TEST_CASE( "std::vector constructor", "[libopenshot][keyframe]" ) CHECK(k1.GetLength() == 11); CHECK(k1.GetValue(10) == Approx(30.0f).margin(0.0001)); } + +#ifdef USE_OPENCV +TEST_CASE( "TrackedObjectBBox init", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb(62,143,0,212); + + CHECK(kfb.delta_x.GetInt(1) == 0); + CHECK(kfb.delta_y.GetInt(1) == 0); + + CHECK(kfb.scale_x.GetInt(1) == 1); + CHECK(kfb.scale_y.GetInt(1) == 1); + + CHECK(kfb.rotation.GetInt(1) == 0); + + CHECK(kfb.stroke_width.GetInt(1) == 2); + CHECK(kfb.stroke_alpha.GetInt(1) == 0); + + CHECK(kfb.background_alpha .GetInt(1)== 1); + CHECK(kfb.background_corner.GetInt(1) == 0); + + CHECK(kfb.stroke.red.GetInt(1) == 62); + CHECK(kfb.stroke.green.GetInt(1) == 143); + CHECK(kfb.stroke.blue.GetInt(1) == 0); + CHECK(kfb.stroke.alpha.GetInt(1) == 212); + + CHECK(kfb.background.red.GetInt(1) == 0); + CHECK(kfb.background.green.GetInt(1) == 0); + CHECK(kfb.background.blue.GetInt(1) == 255); + CHECK(kfb.background.alpha.GetInt(1) == 0); + +} + +TEST_CASE( "TrackedObjectBBox AddBox and RemoveBox", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + CHECK(kfb.Contains(1) == true); + CHECK(kfb.GetLength() == 1); + + kfb.RemoveBox(1); + + CHECK_FALSE(kfb.Contains(1)); + CHECK(kfb.GetLength() == 0); +} + +TEST_CASE( "TrackedObjectBBox GetVal", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(1); + + CHECK(val.cx == 10.0); + 
CHECK(val.cy == 10.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + CHECK(val.angle == 0.0); +} + +TEST_CASE( "TrackedObjectBBox GetVal interpolation", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(11, 20.0, 20.0, 100.0, 100.0, 0.0); + kfb.AddBox(21, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(31, 40.0, 40.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(5); + + CHECK(val.cx == 14.0); + CHECK(val.cy == 14.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + + val = kfb.GetBox(15); + + CHECK(val.cx == 24.0); + CHECK(val.cy == 24.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + + val = kfb.GetBox(25); + + CHECK(val.cx == 34.0); + CHECK(val.cy == 34.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + +} + + +TEST_CASE( "TrackedObjectBBox SetJson", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(10, 20.0, 20.0, 100.0, 100.0, 0.0); + kfb.AddBox(20, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(30, 40.0, 40.0, 100.0, 100.0, 0.0); + + kfb.scale_x.AddPoint(1, 2.0); + kfb.scale_x.AddPoint(10, 3.0); + + kfb.SetBaseFPS(Fraction(24.0, 1.0)); + + auto dataJSON = kfb.Json(); + TrackedObjectBBox fromJSON_kfb; + fromJSON_kfb.SetJson(dataJSON); + + int num_kfb = kfb.GetBaseFPS().num; + int num_fromJSON_kfb = fromJSON_kfb.GetBaseFPS().num; + CHECK(num_kfb == num_fromJSON_kfb); + + double time_kfb = kfb.FrameNToTime(1, 1.0); + double time_fromJSON_kfb = fromJSON_kfb.FrameNToTime(1, 1.0); + CHECK(time_kfb == time_fromJSON_kfb); + + BBox kfb_bbox = kfb.BoxVec[time_kfb]; + BBox fromJSON_bbox = fromJSON_kfb.BoxVec[time_fromJSON_kfb]; + + CHECK(kfb_bbox.cx == fromJSON_bbox.cx); + CHECK(kfb_bbox.cy == fromJSON_bbox.cy); + CHECK(kfb_bbox.width == fromJSON_bbox.width); + CHECK(kfb_bbox.height == fromJSON_bbox.height); + CHECK(kfb_bbox.angle == fromJSON_bbox.angle); +} + +TEST_CASE( 
"TrackedObjectBBox scaling", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 10.0, 10.0, 0.0); + kfb.scale_x.AddPoint(1.0, 2.0); + kfb.scale_y.AddPoint(1.0, 3.0); + + BBox bbox = kfb.GetBox(1); + + CHECK(bbox.width == 20.0); + CHECK(bbox.height == 30.0); +} + +TEST_CASE( "AttachToObject", "[libopenshot][keyframe]" ) +{ + std::stringstream path1, path2; + path1 << TEST_MEDIA_PATH << "test.avi"; + path2 << TEST_MEDIA_PATH << "run.mp4"; + + // Create Timelime + Timeline t(1280, 720, Fraction(25,1), 44100, 2, ChannelLayout::LAYOUT_STEREO); + + // Create Clip and add it to the Timeline + Clip clip(new FFmpegReader(path1.str())); + clip.Id("AAAA1234"); + + // Create a child clip and add it to the Timeline + Clip childClip(new FFmpegReader(path2.str())); + childClip.Id("CHILD123"); + + // Add clips to timeline + t.AddClip(&childClip); + t.AddClip(&clip); + + // Create tracker and add it to clip + Tracker tracker; + clip.AddEffect(&tracker); + + // Save a pointer to trackedData + std::shared_ptr trackedData = tracker.trackedData; + + // Change trackedData scale + trackedData->scale_x.AddPoint(1, 2.0); + CHECK(trackedData->scale_x.GetValue(1) == 2.0); + + // Tracked Data JSON + auto trackedDataJson = trackedData->JsonValue(); + + // Get and cast the trakcedObjec + std::list ids = t.GetTrackedObjectsIds(); + auto trackedObject_base = t.GetTrackedObject(ids.front()); + auto trackedObject = std::make_shared(); + trackedObject = std::dynamic_pointer_cast(trackedObject_base); + CHECK(trackedObject == trackedData); + + // Set trackedObject Json Value + trackedObject->SetJsonValue(trackedDataJson); + + // Attach childClip to tracked object + std::string tracked_id = trackedData->Id(); + childClip.Open(); + childClip.AttachToObject(tracked_id); + + auto trackedTest = std::make_shared(); + trackedTest = std::dynamic_pointer_cast(childClip.GetAttachedObject()); + + CHECK(trackedData->scale_x.GetValue(1) == trackedTest->scale_x.GetValue(1)); + + 
auto frameTest = childClip.GetFrame(1); + childClip.Close(); + // XXX: Here, too, there needs to be some sort of actual _testing_ of the results +} + +TEST_CASE( "GetBoxValues", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox trackedDataObject; + trackedDataObject.AddBox(1, 10.0, 10.0, 20.0, 20.0, 30.0); + + auto trackedData = std::make_shared(trackedDataObject); + + auto boxValues = trackedData->GetBoxValues(1); + + CHECK(boxValues["cx"] == 10.0); + CHECK(boxValues["cy"] == 10.0); + CHECK(boxValues["w"] == 20.0); + CHECK(boxValues["h"] == 20.0); + CHECK(boxValues["ang"] == 30.0); +} +#endif \ No newline at end of file diff --git a/tests/KeyFrame_Tests.cpp b/tests/KeyFrame_Tests.cpp new file mode 100644 index 000000000..14864cadb --- /dev/null +++ b/tests/KeyFrame_Tests.cpp @@ -0,0 +1,710 @@ +/** + * @file + * @brief Unit tests for openshot::Keyframe + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include +#include + +#include "UnitTest++.h" +// Prevent name clashes with juce::UnitTest +#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include "KeyFrame.h" +#ifdef USE_OPENCV +#include "TrackedObjectBBox.h" +#endif +#include "Exceptions.h" +#include "Coordinate.h" +#include "Fraction.h" +#include "Clip.h" +#include "Timeline.h" +#include "effects/Tracker.h" +#include "Point.h" + +using namespace std; +using namespace openshot; + +SUITE(Keyframe) { + +TEST(GetPoint_With_No_Points) +{ + // Create an empty keyframe + Keyframe k1; + + CHECK_THROW(k1.GetPoint(0), OutOfBoundsPoint); +} + +TEST(GetPoint_With_1_Points) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,3)); + + CHECK_THROW(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK_EQUAL(1, k1.GetCount()); + CHECK_CLOSE(2.0f, k1.GetPoint(0).co.X, 0.00001); + CHECK_CLOSE(3.0f, k1.GetPoint(0).co.Y, 0.00001); + CHECK_THROW(k1.GetPoint(1), OutOfBoundsPoint); +} + + +TEST(AddPoint_With_1_Point) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,9)); + + CHECK_CLOSE(2.0f, k1.GetPoint(0).co.X, 0.00001); + CHECK_THROW(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK_THROW(k1.GetPoint(1), OutOfBoundsPoint); +} + +TEST(AddPoint_With_2_Points) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,9)); + k1.AddPoint(openshot::Point(5,20)); + + CHECK_CLOSE(2.0f, k1.GetPoint(0).co.X, 0.00001); + CHECK_CLOSE(5.0f, k1.GetPoint(1).co.X, 0.00001); + CHECK_THROW(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK_THROW(k1.GetPoint(2), OutOfBoundsPoint); +} + +TEST(GetValue_For_Bezier_Curve_2_Points) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + 
CHECK_CLOSE(1.12414f, kf.GetValue(9), 0.0001); + CHECK_CLOSE(1.86370f, kf.GetValue(20), 0.0001); + CHECK_CLOSE(3.79733f, kf.GetValue(40), 0.0001); + CHECK_CLOSE(4.0f, kf.GetValue(50), 0.0001); + // Check the expected number of values + CHECK_EQUAL(51, kf.GetLength()); +} + +TEST(GetValue_For_Bezier_Curve_5_Points_40_Percent_Handle) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(150, 0), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(200, 3), BEZIER)); + + // Spot check values from the curve + CHECK_CLOSE(kf.GetValue(-1), 1.0f, 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(2.68197f, kf.GetValue(27), 0.0001); + CHECK_CLOSE(7.47719f, kf.GetValue(77), 0.0001); + CHECK_CLOSE(4.20468f, kf.GetValue(127), 0.0001); + CHECK_CLOSE(1.73860f, kf.GetValue(177), 0.0001); + CHECK_CLOSE(3.0f, kf.GetValue(200), 0.0001); + // Check the expected number of values + CHECK_EQUAL(201, kf.GetLength()); +} + +TEST(GetValue_For_Bezier_Curve_5_Points_25_Percent_Handle) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(150, 0), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(200, 3), BEZIER)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(2.68197f, kf.GetValue(27), 0.0001); + CHECK_CLOSE(7.47719f, kf.GetValue(77), 0.0001); + CHECK_CLOSE(4.20468f, kf.GetValue(127), 0.0001); + CHECK_CLOSE(1.73860f, kf.GetValue(177), 0.0001); + 
CHECK_CLOSE(3.0f, kf.GetValue(200), 0.0001); + // Check the expected number of values + CHECK_EQUAL(201, kf.GetLength()); +} + +TEST(GetValue_For_Linear_Curve_3_Points) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), LINEAR)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(3.33333f, kf.GetValue(9), 0.0001); + CHECK_CLOSE(6.54167f, kf.GetValue(20), 0.0001); + CHECK_CLOSE(4.4f, kf.GetValue(40), 0.0001); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.0001); + // Check the expected number of values + CHECK_EQUAL(51, kf.GetLength()); +} + +TEST(GetValue_For_Constant_Curve_3_Points) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), CONSTANT)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(-1), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(0), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.0001); + CHECK_CLOSE(1.0f, kf.GetValue(24), 0.0001); + CHECK_CLOSE(8.0f, kf.GetValue(25), 0.0001); + CHECK_CLOSE(8.0f, kf.GetValue(40), 0.0001); + CHECK_CLOSE(8.0f, kf.GetValue(49), 0.0001); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.0001); + // Check the expected number of values + CHECK_EQUAL(51, kf.GetLength()); +} + +TEST(Check_Direction_and_Repeat_Fractions) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 500); + kf.AddPoint(400, 100); + kf.AddPoint(500, 500); + + // Spot check values from the curve + CHECK_EQUAL(500, kf.GetInt(1)); + CHECK_EQUAL(false, kf.IsIncreasing(1)); + CHECK_EQUAL(1, kf.GetRepeatFraction(1).num); + CHECK_EQUAL(13, 
kf.GetRepeatFraction(1).den); + CHECK_EQUAL(500, kf.GetDelta(1)); + + CHECK_EQUAL(498, kf.GetInt(24)); + CHECK_EQUAL(false, kf.IsIncreasing(24)); + CHECK_EQUAL(3, kf.GetRepeatFraction(24).num); + CHECK_EQUAL(6, kf.GetRepeatFraction(24).den); + CHECK_EQUAL(0, kf.GetDelta(24)); + + CHECK_EQUAL(100, kf.GetLong(390)); + CHECK_EQUAL(true, kf.IsIncreasing(390)); + CHECK_EQUAL(3, kf.GetRepeatFraction(390).num); + CHECK_EQUAL(16, kf.GetRepeatFraction(390).den); + CHECK_EQUAL(0, kf.GetDelta(390)); + + CHECK_EQUAL(100, kf.GetLong(391)); + CHECK_EQUAL(true, kf.IsIncreasing(391)); + CHECK_EQUAL(4, kf.GetRepeatFraction(391).num); + CHECK_EQUAL(16, kf.GetRepeatFraction(391).den); + CHECK_EQUAL(-1, kf.GetDelta(388)); +} + + +TEST(Get_Closest_Point) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1000, 1.0); + kf.AddPoint(2500, 0.0); + + // Spot check values from the curve (to the right) + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(900, 900)).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1, 1)).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(5, 5)).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(1000, 1000)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(1001, 1001)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(2500, 2500)).co.X); + CHECK_EQUAL(2500, kf.GetClosestPoint(openshot::Point(3000, 3000)).co.X); + + // Spot check values from the curve (to the left) + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(900, 900), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1, 1), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(5, 5), true).co.X); + CHECK_EQUAL(1, kf.GetClosestPoint(openshot::Point(1000, 1000), true).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(1001, 1001), true).co.X); + CHECK_EQUAL(1000, kf.GetClosestPoint(openshot::Point(2500, 2500), true).co.X); + CHECK_EQUAL(2500, 
kf.GetClosestPoint(openshot::Point(3000, 3000), true).co.X); +} + + +TEST(Get_Previous_Point) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1000, 1.0); + kf.AddPoint(2500, 0.0); + + // Spot check values from the curve + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(900, 900))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1, 1))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(5, 5))).co.X); + CHECK_EQUAL(1, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1000, 1000))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1001, 1001))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(2500, 2500))).co.X); + CHECK_EQUAL(1000, kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(3000, 3000))).co.X); + +} + +TEST(Get_Max_Point) +{ + // Create a keyframe curve + Keyframe kf; + kf.AddPoint(1, 1.0); + + // Spot check values from the curve + CHECK_EQUAL(1.0, kf.GetMaxPoint().co.Y); + + kf.AddPoint(2, 0.0); + + // Spot check values from the curve + CHECK_EQUAL(1.0, kf.GetMaxPoint().co.Y); + + kf.AddPoint(3, 2.0); + + // Spot check values from the curve + CHECK_EQUAL(2.0, kf.GetMaxPoint().co.Y); + + kf.AddPoint(4, 1.0); + + // Spot check values from the curve + CHECK_EQUAL(2.0, kf.GetMaxPoint().co.Y); +} + +TEST(Scale_Keyframe) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), BEZIER)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(7.99f, kf.GetValue(24), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); + CHECK_CLOSE(3.85f, kf.GetValue(40), 0.01); + CHECK_CLOSE(2.01f, kf.GetValue(49), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); + + // 
Resize / Scale the keyframe + kf.ScalePoints(2.0); // 100% larger + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(4.08f, kf.GetValue(24), 0.01); + CHECK_CLOSE(4.36f, kf.GetValue(25), 0.01); + CHECK_CLOSE(7.53f, kf.GetValue(40), 0.01); + CHECK_CLOSE(7.99f, kf.GetValue(49), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(50), 0.01); + CHECK_CLOSE(2.39f, kf.GetValue(90), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(100), 0.01); + + // Resize / Scale the keyframe + kf.ScalePoints(0.5); // 50% smaller, which should match the original size + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(7.99f, kf.GetValue(24), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); + CHECK_CLOSE(3.85f, kf.GetValue(40), 0.01); + CHECK_CLOSE(2.01f, kf.GetValue(49), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); + +} + +TEST(Flip_Keyframe) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), LINEAR)); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); + CHECK_CLOSE(10.0f, kf.GetValue(100), 0.01); + + // Flip the points + kf.FlipPoints(); + + // Spot check values from the curve + CHECK_CLOSE(10.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(25), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(50), 0.01); + CHECK_CLOSE(1.0f, kf.GetValue(100), 0.01); + + // Flip the points again (back to the original) + kf.FlipPoints(); + + // Spot check values from the curve + CHECK_CLOSE(1.0f, kf.GetValue(1), 0.01); + CHECK_CLOSE(8.0f, kf.GetValue(25), 0.01); + CHECK_CLOSE(2.0f, kf.GetValue(50), 0.01); + CHECK_CLOSE(10.0f, kf.GetValue(100), 0.01); +} + 
+TEST(Remove_Duplicate_Point) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1, 1.0); + kf.AddPoint(1, 2.0); + + // Spot check values from the curve + CHECK_EQUAL(1, kf.GetLength()); + CHECK_CLOSE(2.0, kf.GetPoint(0).co.Y, 0.01); +} + +TEST(Large_Number_Values) +{ + // Large value + int64_t const large_value = 30 * 60 * 90; + + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 1.0); + kf.AddPoint(large_value, 100.0); // 90 minutes long + + // Spot check values from the curve + CHECK_EQUAL(large_value + 1, kf.GetLength()); + CHECK_CLOSE(1.0, kf.GetPoint(0).co.Y, 0.01); + CHECK_CLOSE(100.0, kf.GetPoint(1).co.Y, 0.01); +} + +TEST(Remove_Point) +{ + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(3, 100), CONSTANT)); + CHECK_EQUAL(1, kf.GetInt(2)); + kf.AddPoint(openshot::Point(Coordinate(2, 50), CONSTANT)); + CHECK_EQUAL(50, kf.GetInt(2)); + kf.RemovePoint(1); // This is the index of point with X == 2 + CHECK_EQUAL(1, kf.GetInt(2)); + CHECK_THROW(kf.RemovePoint(100), OutOfBoundsPoint); +} + +TEST(Constant_Interpolation_First_Segment) +{ + Keyframe kf; + kf.AddPoint(Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(Point(Coordinate(2, 50), CONSTANT)); + kf.AddPoint(Point(Coordinate(3, 100), CONSTANT)); + CHECK_EQUAL(1, kf.GetInt(0)); + CHECK_EQUAL(1, kf.GetInt(1)); + CHECK_EQUAL(50, kf.GetInt(2)); + CHECK_EQUAL(100, kf.GetInt(3)); + CHECK_EQUAL(100, kf.GetInt(4)); +} + +TEST(isIncreasing) +{ + // Which cases need to be tested to keep same behaviour as + // previously? 
+ // + // - "invalid point" => true + // - point where all next values are equal => false + // - point where first non-eq next value is smaller => false + // - point where first non-eq next value is larger => true + Keyframe kf; + kf.AddPoint(1, 1, LINEAR); // testing with linear + kf.AddPoint(3, 5, BEZIER); // testing with bezier + kf.AddPoint(6, 10, CONSTANT); // first non-eq is smaller + kf.AddPoint(8, 8, CONSTANT); // first non-eq is larger + kf.AddPoint(10, 10, CONSTANT); // all next values are equal + kf.AddPoint(15, 10, CONSTANT); + + // "invalid points" + CHECK_EQUAL(true, kf.IsIncreasing(0)); + CHECK_EQUAL(true, kf.IsIncreasing(15)); + // all next equal + CHECK_EQUAL(false, kf.IsIncreasing(12)); + // first non-eq is larger + CHECK_EQUAL(true, kf.IsIncreasing(8)); + // first non-eq is smaller + CHECK_EQUAL(false, kf.IsIncreasing(6)); + // bezier and linear + CHECK_EQUAL(true, kf.IsIncreasing(4)); + CHECK_EQUAL(true, kf.IsIncreasing(2)); +} + +TEST(GetLength) +{ + Keyframe f; + CHECK_EQUAL(0, f.GetLength()); + f.AddPoint(1, 1); + CHECK_EQUAL(1, f.GetLength()); + f.AddPoint(2, 1); + CHECK_EQUAL(3, f.GetLength()); + f.AddPoint(200, 1); + CHECK_EQUAL(201, f.GetLength()); + + Keyframe g; + g.AddPoint(200, 1); + CHECK_EQUAL(1, g.GetLength()); + g.AddPoint(1,1); + CHECK_EQUAL(201, g.GetLength()); +} + +TEST(Use_Interpolation_of_Segment_End_Point) +{ + Keyframe f; + f.AddPoint(1,0, CONSTANT); + f.AddPoint(100,155, BEZIER); + CHECK_CLOSE(75.9, f.GetValue(50), 0.1); +} + +TEST(Handle_Large_Segment) +{ + Keyframe kf; + kf.AddPoint(1, 0, CONSTANT); + kf.AddPoint(1000000, 1, LINEAR); + UNITTEST_TIME_CONSTRAINT(10); // 10 milliseconds would still be relatively slow, but need to think about slower build machines! 
+ CHECK_CLOSE(0.5, kf.GetValue(500000), 0.01); + CHECK_EQUAL(true, kf.IsIncreasing(10)); + Fraction fr = kf.GetRepeatFraction(250000); + CHECK_CLOSE(0.5, (double)fr.num / fr.den, 0.01); +} + +TEST(Point_Vector_Constructor) +{ + std::vector points{Point(1, 10), Point(5, 20), Point(10, 30)}; + Keyframe k1(points); + + CHECK_EQUAL(11, k1.GetLength()); + CHECK_CLOSE(30.0f, k1.GetValue(10), 0.0001); +} + +}; // SUITE + +#ifdef USE_OPENCV +TEST(TrackedObjectBBox_init_test) { + + TrackedObjectBBox kfb; + +} + +TEST(TrackedObjectBBox_addBox_test) { + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + CHECK_EQUAL(true, kfb.Contains(1)); + CHECK_EQUAL(1, kfb.GetLength()); + + kfb.RemoveBox(1); + + CHECK_EQUAL(false, kfb.Contains(1)); + CHECK_EQUAL(0, kfb.GetLength()); +} + + +TEST(TrackedObjectBBox_GetVal_test) { + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(1); + + CHECK_EQUAL(10.0, val.cx); + CHECK_EQUAL(10.0, val.cy); + CHECK_EQUAL(100.0,val.width); + CHECK_EQUAL(100.0,val.height); + CHECK_EQUAL(0.0, val.angle); +} + + +TEST(TrackedObjectBBox_GetVal_Interpolation) { + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(11, 20.0, 20.0, 100.0, 100.0, 0.0); + kfb.AddBox(21, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(31, 40.0, 40.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(5); + + CHECK_EQUAL(14.0, val.cx); + CHECK_EQUAL(14.0, val.cy); + CHECK_EQUAL(100.0,val.width); + CHECK_EQUAL(100.0,val.height); + + val = kfb.GetBox(15); + + CHECK_EQUAL(24.0, val.cx); + CHECK_EQUAL(24.0, val.cy); + CHECK_EQUAL(100.0,val.width); + CHECK_EQUAL(100.0, val.height); + + val = kfb.GetBox(25); + + CHECK_EQUAL(34.0, val.cx); + CHECK_EQUAL(34.0, val.cy); + CHECK_EQUAL(100.0,val.width); + CHECK_EQUAL(100.0, val.height); + +} + + +TEST(TrackedObjectBBox_Json_set) { + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(10, 20.0, 20.0, 100.0, 
100.0, 0.0); + kfb.AddBox(20, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(30, 40.0, 40.0, 100.0, 100.0, 0.0); + + kfb.scale_x.AddPoint(1, 2.0); + kfb.scale_x.AddPoint(10, 3.0); + + kfb.SetBaseFPS(Fraction(24.0, 1.0)); + + auto dataJSON = kfb.Json(); + TrackedObjectBBox fromJSON_kfb; + fromJSON_kfb.SetJson(dataJSON); + + CHECK_EQUAL(kfb.GetBaseFPS().num, fromJSON_kfb.GetBaseFPS().num); + + double time_kfb = kfb.FrameNToTime(1, 1.0); + double time_fromJSON_kfb = fromJSON_kfb.FrameNToTime(1, 1.0); + CHECK_EQUAL(time_kfb, time_fromJSON_kfb); + + BBox kfb_bbox = kfb.BoxVec[time_kfb]; + BBox fromJSON_bbox = fromJSON_kfb.BoxVec[time_fromJSON_kfb]; + + CHECK_EQUAL(kfb_bbox.cx, fromJSON_bbox.cx); + CHECK_EQUAL(kfb_bbox.cy, fromJSON_bbox.cy); + CHECK_EQUAL(kfb_bbox.width, fromJSON_bbox.width); + CHECK_EQUAL(kfb_bbox.height, fromJSON_bbox.height); + CHECK_EQUAL(kfb_bbox.angle, fromJSON_bbox.angle); +} + +TEST(TrackedObjectBBox_Scale_test){ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 10.0, 10.0, 0.0); + kfb.scale_x.AddPoint(1.0, 2.0); + kfb.scale_y.AddPoint(1.0, 3.0); + + BBox bbox = kfb.GetBox(1); + + CHECK_EQUAL(20.0, bbox.width); + CHECK_EQUAL(30.0, bbox.height); +} + +TEST(Attach_test){ + + std::stringstream path1, path2; + path1 << TEST_MEDIA_PATH << "test.avi"; + path2 << TEST_MEDIA_PATH << "run.mp4"; + + // Create Timelime + Timeline t(1280, 720, Fraction(25,1), 44100, 2, ChannelLayout::LAYOUT_STEREO); + + // Create Clip and add it to the Timeline + Clip clip(new FFmpegReader(path1.str())); + clip.Id("AAAA1234"); + + // Create a child clip and add it to the Timeline + Clip childClip(new FFmpegReader(path2.str())); + childClip.Id("CHILD123"); + + // Add clips to timeline + t.AddClip(&childClip); + t.AddClip(&clip); + + // Create tracker and add it to clip + Tracker tracker; + clip.AddEffect(&tracker); + + // Save a pointer to trackedData + std::shared_ptr trackedData = tracker.trackedData; + + // Change trackedData scale + trackedData->scale_x.AddPoint(1, 
2.0); + CHECK_EQUAL(2.0, trackedData->scale_x.GetValue(1)); + + // Tracked Data JSON + auto trackedDataJson = trackedData->JsonValue(); + + // Get and cast the trakcedObject + auto trackedObject_base = t.GetTrackedObject(""); + std::shared_ptr trackedObject = std::static_pointer_cast(trackedObject_base); + CHECK_EQUAL(trackedData, trackedObject); + + // Set trackedObject Json Value + trackedObject->SetJsonValue(trackedDataJson); + + // Attach childClip to tracked object + std::string tracked_id = trackedData->Id(); + childClip.Open(); + childClip.AttachToObject(tracked_id); + + std::shared_ptr trackedTest = std::static_pointer_cast(childClip.GetAttachedObject()); + + CHECK_EQUAL(trackedData->scale_x.GetValue(1), trackedTest->scale_x.GetValue(1)); + + auto frameTest = childClip.GetFrame(1); + childClip.Close(); +} + +TEST(GetBoxValues_test){ + + TrackedObjectBBox trackedDataObject; + trackedDataObject.AddBox(1, 10.0, 10.0, 20.0, 20.0, 30.0); + + std::shared_ptr trackedData = std::make_shared(trackedDataObject); + + auto boxValues = trackedData->GetBoxValues(1); + + CHECK_EQUAL(10.0, boxValues["cx"]); + CHECK_EQUAL(10.0, boxValues["cy"]); + CHECK_EQUAL(20.0, boxValues["w"]); + CHECK_EQUAL(20.0, boxValues["h"]); + CHECK_EQUAL(30.0, boxValues["ang"]); +} +#endif \ No newline at end of file diff --git a/tests/Point.cpp b/tests/Point.cpp index 310606d84..6d53f65bf 100644 --- a/tests/Point.cpp +++ b/tests/Point.cpp @@ -30,6 +30,7 @@ #include #include + #include "Point.h" #include "Enums.h" #include "Exceptions.h"