Skip to content

Commit

Permalink
Merge pull request #459 from OpenShot/timeline-reader-improvements
Browse files Browse the repository at this point in the history
Adding ability for a Clip to auto-detect and instantiate a Timeline Reader
  • Loading branch information
jonoomph committed Mar 11, 2020
2 parents 4058dde + 4ea3623 commit 629517f
Show file tree
Hide file tree
Showing 4 changed files with 168 additions and 5 deletions.
4 changes: 4 additions & 0 deletions include/Settings.h
Expand Up @@ -124,6 +124,10 @@ namespace openshot {
/// The audio device name to use during playback
std::string PLAYBACK_AUDIO_DEVICE_NAME = "";

/// The current install path of OpenShot (needs to be set when using Timeline(path), since certain
/// paths depend on the location of OpenShot transitions and files)
std::string PATH_OPENSHOT_INSTALL = "";

/// Create or get an instance of this logger singleton (invoke the class with this method)
static Settings * Instance();
};
Expand Down
7 changes: 7 additions & 0 deletions include/Timeline.h
Expand Up @@ -36,6 +36,7 @@
#include <set>
#include <QtGui/QImage>
#include <QtGui/QPainter>
#include <QtCore/QRegularExpression>
#include "CacheBase.h"
#include "CacheDisk.h"
#include "CacheMemory.h"
Expand Down Expand Up @@ -156,6 +157,7 @@ namespace openshot {
CacheBase *final_cache; ///<Final cache of timeline frames
std::set<FrameMapper*> allocated_frame_mappers; ///< all the frame mappers we allocated and must free
bool managed_cache; ///< Does this timeline instance manage the cache object
std::string path; ///< Optional path of loaded UTF-8 OpenShot JSON project file

/// Process a new layer of video or audio
void add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number, bool is_top_clip, float max_volume);
Expand Down Expand Up @@ -209,6 +211,11 @@ namespace openshot {
/// @param channel_layout The channel layout (i.e. mono, stereo, 3 point surround, etc...)
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout);

/// @brief Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
/// @param projectPath The path of the UTF-8 *.osp project file (JSON contents). Contents will be loaded automatically.
/// @param convert_absolute_paths Should all paths be converted to absolute paths (based on the folder of the path provided)
Timeline(std::string projectPath, bool convert_absolute_paths);

virtual ~Timeline();

/// @brief Add an openshot::Clip to the timeline
Expand Down
23 changes: 20 additions & 3 deletions src/Clip.cpp
Expand Up @@ -38,6 +38,7 @@
#include "../include/QtImageReader.h"
#include "../include/ChunkReader.h"
#include "../include/DummyReader.h"
#include "../include/Timeline.h"

using namespace openshot;

Expand Down Expand Up @@ -159,7 +160,7 @@ Clip::Clip(std::string path) : resampler(NULL), audio_cache(NULL), reader(NULL),

// Get file extension (and convert to lower case)
std::string ext = get_file_extension(path);
transform(ext.begin(), ext.end(), ext.begin(), ::tolower);
std::transform(ext.begin(), ext.end(), ext.begin(), ::tolower);

// Determine if common video formats
if (ext=="avi" || ext=="mov" || ext=="mkv" || ext=="mpg" || ext=="mpeg" || ext=="mp3" || ext=="mp4" || ext=="mts" ||
Expand All @@ -172,6 +173,16 @@ Clip::Clip(std::string path) : resampler(NULL), audio_cache(NULL), reader(NULL),

} catch(...) { }
}
if (ext=="osp")
{
try
{
// Open common video format
reader = new Timeline(path, true);

} catch(...) { }
}


// If no video found, try each reader
if (!reader)
Expand Down Expand Up @@ -319,12 +330,10 @@ std::shared_ptr<Frame> Clip::GetFrame(int64_t requested_frame)

// Now that we have re-mapped what frame number is needed, go and get the frame pointer
std::shared_ptr<Frame> original_frame;
#pragma omp critical (Clip_GetFrame)
original_frame = GetOrCreateFrame(new_frame_number);

// Create a new frame
std::shared_ptr<Frame> frame(new Frame(new_frame_number, 1, 1, "#000000", original_frame->GetAudioSamplesCount(), original_frame->GetAudioChannelsCount()));
#pragma omp critical (Clip_GetFrame)
{
frame->SampleRate(original_frame->SampleRate());
frame->ChannelsLayout(original_frame->ChannelsLayout());
Expand Down Expand Up @@ -789,6 +798,8 @@ Json::Value Clip::JsonValue() const {

if (reader)
root["reader"] = reader->JsonValue();
else
root["reader"] = Json::Value(Json::objectValue);

// return JsonValue
return root;
Expand Down Expand Up @@ -964,6 +975,12 @@ void Clip::SetJsonValue(const Json::Value root) {
// Create new reader
reader = new DummyReader();
reader->SetJsonValue(root["reader"]);

} else if (type == "Timeline") {

// Create new reader (always load from file again)
// This prevents FrameMappers from being loaded on accident
reader = new Timeline(root["reader"]["path"].asString(), true);
}

// mark as managed reader and set parent
Expand Down
139 changes: 137 additions & 2 deletions src/Timeline.cpp
Expand Up @@ -34,7 +34,7 @@ using namespace openshot;

// Default Constructor for the timeline (which sets the canvas width and height)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
is_open(false), auto_map_clips(true), managed_cache(true)
is_open(false), auto_map_clips(true), managed_cache(true), path("")
{
// Create CrashHandler and Attach (incase of errors)
CrashHandler::Instance();
Expand Down Expand Up @@ -64,6 +64,8 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha
info.display_ratio = openshot::Fraction(width, height);
info.display_ratio.Reduce();
info.pixel_ratio = openshot::Fraction(1, 1);
info.acodec = "openshot::timeline";
info.vcodec = "openshot::timeline";

// Init max image size
SetMaxSize(info.width, info.height);
Expand All @@ -73,6 +75,133 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha
final_cache->SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
}

// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
//
// @param projectPath            Path of the UTF-8 *.osp project file (JSON contents). Loaded automatically.
// @param convert_absolute_paths If true, rewrite relative "path"/"image" values in the JSON to absolute
//                               paths (resolved against the project folder, @assets, and @transitions).
// @throws InvalidFile if the project file cannot be found/opened, or if PATH_OPENSHOT_INSTALL
//                     (or its "transitions" subfolder) does not exist.
Timeline::Timeline(std::string projectPath, bool convert_absolute_paths) :
		is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath) {

	// Create CrashHandler and Attach (in case of errors)
	CrashHandler::Instance();

	// Init final cache as NULL (will be created after loading json)
	final_cache = NULL;

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Check if path exists
	QFileInfo filePath(QString::fromStdString(path));
	if (!filePath.exists()) {
		throw InvalidFile("File could not be opened.", path);
	}

	// Check OpenShot Install Path exists (required to resolve @transitions references)
	Settings *s = Settings::Instance();
	QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
	if (!openshotPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
	}
	QDir openshotTransPath(openshotPath.filePath("transitions"));
	if (!openshotTransPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
	}

	// Determine asset path (per-project media lives in a "<name>_assets" folder next to the project file)
	QString asset_name = filePath.baseName().left(30) + "_assets";
	QDir asset_folder(filePath.dir().filePath(asset_name));
	if (!asset_folder.exists()) {
		// Create directory if needed
		asset_folder.mkpath(".");
	}

	// Load UTF-8 project file into QString. Verify the open succeeded: the file
	// may exist but be unreadable (permissions), or vanish between the exists()
	// check above and this open — silently reading empty JSON would only fail
	// later with a confusing parse error.
	QFile projectFile(QString::fromStdString(path));
	if (!projectFile.open(QFile::ReadOnly)) {
		throw InvalidFile("File could not be opened.", path);
	}
	QString projectContents = QString::fromUtf8(projectFile.readAll());

	// Convert all relative paths into absolute paths (if requested)
	if (convert_absolute_paths) {

		// Find all "image" or "path" references in JSON (using regex). Must loop through match results
		// due to our path matching needs, which are not possible with the QString::replace() function.
		QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
		std::vector<QRegularExpressionMatch> matchedPositions;
		QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
		while (i.hasNext()) {
			QRegularExpressionMatch match = i.next();
			if (match.hasMatch()) {
				// Push all match objects into a vector (so we can reverse them later)
				matchedPositions.push_back(match);
			}
		}

		// Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
		std::vector<QRegularExpressionMatch>::reverse_iterator itr;
		for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
			QRegularExpressionMatch match = *itr;
			QString relativeKey = match.captured(1); // image or path
			QString relativePath = match.captured(2); // relative file path
			QString absolutePath = "";

			// Find absolute path of all path, image (including special replacements of @assets and @transitions)
			if (relativePath.startsWith("@assets")) {
				absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
			} else if (relativePath.startsWith("@transitions")) {
				absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
			} else {
				absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
			}

			// Replace path in JSON content, if an absolute path was successfully found
			// (canonicalFilePath() returns an empty string when the target does not exist)
			if (!absolutePath.isEmpty()) {
				projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
			}
		}
		// Clear matches
		matchedPositions.clear();
	}

	// Set JSON of project
	SetJson(projectContents.toStdString());

	// Calculate valid duration and set has_audio and has_video
	// based on content inside this Timeline's clips.
	float calculated_duration = 0.0;
	for (auto clip : clips)
	{
		float clip_last_frame = clip->Position() + clip->Duration();
		if (clip_last_frame > calculated_duration)
			calculated_duration = clip_last_frame;
		if (clip->Reader() && clip->Reader()->info.has_audio)
			info.has_audio = true;
		if (clip->Reader() && clip->Reader()->info.has_video)
			info.has_video = true;

	}
	info.video_length = calculated_duration * info.fps.ToFloat();
	info.duration = calculated_duration;

	// Init FileInfo settings
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";
	info.video_timebase = info.fps.Reciprocal();
	// NOTE: these deliberately override the per-clip detection above — a Timeline
	// reader always produces both video and audio frames, even from empty tracks.
	info.has_video = true;
	info.has_audio = true;

	// Init max image size
	SetMaxSize(info.width, info.height);

	// Init cache
	final_cache = new CacheMemory();
	final_cache->SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
}

Timeline::~Timeline() {
if (is_open)
// Auto Close if not already
Expand Down Expand Up @@ -706,7 +835,8 @@ void Timeline::Close()
is_open = false;

// Clear cache
final_cache->Clear();
if (final_cache)
final_cache->Clear();
}

// Open the reader (and start consuming resources)
Expand Down Expand Up @@ -984,6 +1114,7 @@ Json::Value Timeline::JsonValue() const {
root["viewport_x"] = viewport_x.JsonValue();
root["viewport_y"] = viewport_y.JsonValue();
root["color"] = color.JsonValue();
root["path"] = path;

// Add array of clips
root["clips"] = Json::Value(Json::arrayValue);
Expand Down Expand Up @@ -1037,6 +1168,10 @@ void Timeline::SetJsonValue(const Json::Value root) {
// Set parent data
ReaderBase::SetJsonValue(root);

// Set data from Json (if key is found)
if (!root["path"].isNull())
path = root["path"].asString();

if (!root["clips"].isNull()) {
// Clear existing clips
clips.clear();
Expand Down

0 comments on commit 629517f

Please sign in to comment.