diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f3ac30332..16bb0327e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: CI Build +name: libopenshot CI Build on: [push, pull_request] jobs: build: @@ -13,6 +13,10 @@ jobs: steps: - uses: actions/checkout@v2 + # Work around a codecov issue detecting commit SHAs + # see: https://community.codecov.io/t/issue-detecting-commit-sha-please-run-actions-checkout-with-fetch-depth-1-or-set-to-0/2571 + with: + fetch-depth: 0 - uses: haya14busa/action-cond@v1 id: coverage @@ -31,8 +35,13 @@ jobs: libopenshot-audio-dev \ qtbase5-dev qtbase5-dev-tools \ libfdk-aac-dev libavcodec-dev libavformat-dev libavdevice-dev libavutil-dev libavfilter-dev libswscale-dev libpostproc-dev libswresample-dev \ - libzmq3-dev libmagick++-dev libunittest++-dev \ + libzmq3-dev libmagick++-dev \ libopencv-dev libprotobuf-dev protobuf-compiler + # Install catch2 package from Ubuntu 20.10, since for some reason + # even 20.04 only has Catch 1.12.1 available. + wget https://launchpad.net/ubuntu/+archive/primary/+files/catch2_2.13.0-1_all.deb + sudo dpkg -i catch2_2.13.0-1_all.deb + - name: Build libopenshot shell: bash @@ -47,7 +56,7 @@ jobs: shell: bash run: | pushd build - cmake --build . --target os_test -- VERBOSE=1 + cmake --build . 
--target coverage -- VERBOSE=1 popd - name: Install libopenshot @@ -61,4 +70,3 @@ jobs: if: ${{ matrix.compiler == 'clang' }} with: file: build/coverage.info - diff --git a/CMakeLists.txt b/CMakeLists.txt index 85aa26409..cf73a2829 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -73,10 +73,15 @@ include(FeatureSummary) # Optional build settings for libopenshot option(USE_SYSTEM_JSONCPP "Use system installed JsonCpp, if found" ON) option(DISABLE_BUNDLED_JSONCPP "Don't fall back to bundled JsonCpp" OFF) + option(ENABLE_IWYU "Enable 'Include What You Use' scanner (CMake 3.3+)" OFF) -option(ENABLE_TESTS "Build unit tests (requires UnitTest++)" ON) + +option(ENABLE_TESTS "Build unit tests (requires Catch2)" ON) +option(ENABLE_PARALLEL_CTEST "Run CTest using multiple processors" ON) option(ENABLE_COVERAGE "Scan test coverage using gcov and report" OFF) + option(ENABLE_DOCS "Build API documentation (requires Doxygen)" ON) + option(APPIMAGE_BUILD "Build to install in an AppImage (Linux only)" OFF) option(ENABLE_MAGICK "Use ImageMagick, if available" ON) option(ENABLE_OPENCV "Build with OpenCV algorithms (requires Boost, Protobuf 3)" ON) @@ -87,7 +92,7 @@ if (DISABLE_TESTS) endif() if(DEFINED ENABLE_TESTS) - set(ENABLE_TESTS ${ENABLE_TESTS} CACHE BOOL "Build unit tests (requires UnitTest++)" FORCE) + set(ENABLE_TESTS ${ENABLE_TESTS} CACHE BOOL "Build unit tests (requires Catch2)" FORCE) endif() #### Work around a GCC < 9 bug with handling of _Pragma() in macros @@ -109,7 +114,7 @@ ENDIF(WIN32) ############## Code Coverage ######################### if (ENABLE_COVERAGE AND NOT ENABLE_TESTS) message(WARNING "ENABLE_COVERAGE requires unit tests, forcing ENABLE_TESTS") - set(ENABLE_TESTS ON CACHE BOOL "Don't build unit tests" FORCE) + set(ENABLE_TESTS ON CACHE BOOL "Build unit tests (requires Catch2 or UnitTest++)" FORCE) endif() if (ENABLE_COVERAGE) @@ -166,55 +171,87 @@ if (ENABLE_DOCS) OPTIONAL ) # No error if the docs aren't found endif() endif() 
-add_feature_info("Documentation" DOCS_ENABLED "Build API documentation with 'make doc'") ############# PROCESS tests/ DIRECTORY ############## if(ENABLE_TESTS) set(TESTS_ENABLED TRUE) # May be overridden by tests/CMakeLists.txt + find_package(Catch2 REQUIRED) + if(ENABLE_PARALLEL_CTEST) + # Figure out the amount of parallelism for CTest + include(ProcessorCount) + ProcessorCount(CPU_COUNT) + if(NOT CPU_COUNT EQUAL 0) + message(STATUS "Setting up unit tests to use ${CPU_COUNT} processors") + set(CTEST_OPTIONS "-j${CPU_COUNT}") + endif() + endif() + include(CTest) + include(Catch) add_subdirectory(tests) endif() add_feature_info("Unit tests" TESTS_ENABLED "Compile unit tests for library functions") ############## COVERAGE REPORTING ################# -if (ENABLE_COVERAGE) +if (ENABLE_COVERAGE AND DEFINED UNIT_TEST_TARGETS) setup_target_for_coverage_lcov( NAME coverage LCOV_ARGS "--no-external" - EXECUTABLE openshot-test - DEPENDENCIES openshot openshot-test + EXECUTABLE ctest + EXECUTABLE_ARGS ${CTEST_OPTIONS} + DEPENDENCIES openshot ${UNIT_TEST_TARGETS} EXCLUDE "bindings/*" "examples/*" "${CMAKE_CURRENT_BINARY_DIR}/bindings/*" "${CMAKE_CURRENT_BINARY_DIR}/src/*_autogen/*" ) - if(NOT TARGET os_test) - add_custom_target(os_test) - add_dependencies(os_test coverage) - endif() endif() -# Also hook up 'test' as an alias for the 'os_test' target, if possible -# This requires CMake 3.11+, where the CMP0037 policy -# configured to 'NEW' mode will not reserve target names -# unless the corresponding feature is actually used -if (POLICY CMP0037) - cmake_policy(SET CMP0037 NEW) +if(TESTS_ENABLED AND NOT TARGET coverage) + add_custom_target(coverage + COMMAND ctest ${CTEST_OPTIONS} + DEPENDS openshot ${UNIT_TEST_TARGETS} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Running unit tests (coverage disabled)" + ) endif() -if(TARGET os_test) - if (CMAKE_VERSION VERSION_GREATER 3.11) - message(STATUS "Cmake 3.11+ detected, enabling 'test' target") - 
add_custom_target(test) - add_dependencies(test os_test) - set(TEST_TARGET_NAME "test") - else() - set(TEST_TARGET_NAME "os_test") + +if(TARGET test AND NOT TARGET os_test) + add_custom_target(os_test) + add_dependencies(os_test coverage) +endif() + +if(TARGET os_test AND NOT TARGET test AND CMAKE_VERSION VERSION_GREATER 3.11) + # Also hook up 'test' as an alias for the 'os_test' target, if possible + # This requires CMake 3.11+, where the CMP0037 policy + # configured to 'NEW' mode will not reserve target names + # unless the corresponding feature is actually used + if (POLICY CMP0037) + cmake_policy(SET CMP0037 NEW) endif() - add_feature_info("Testrunner" ENABLE_TESTS "Run unit tests with 'make ${TEST_TARGET_NAME}'") + message(STATUS "Cmake 3.11+ detected, enabling 'test' target") + add_custom_target(test) + add_dependencies(test os_test) endif() +### +### Add feature-summary details on non-default built targets +### +set(optional_targets test os_test coverage doc) +set(target_test_description "Build and execute unit tests") +set(target_os_test_description "Build and execute unit tests (legacy target)") +set(target_coverage_description "Run unit tests and (if enabled) collect coverage data") +set(target_doc_description "Build formatted API documentation (HTML+SVG)") +foreach(_tname IN LISTS optional_targets) + if(TARGET ${_tname}) + add_feature_info("Non-default target '${_tname}'" TRUE ${target_${_tname}_description}) + else() + message(DEBUG "No target ${_tname}") + endif() +endforeach() + ########### PRINT FEATURE SUMMARY ############## feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAGES - DESCRIPTION "Displaying feature summary\n\nBuild configuration:") + DESCRIPTION "Build configuration:") diff --git a/Doxyfile.in b/Doxyfile.in index 343190a17..35c00d97b 100644 --- a/Doxyfile.in +++ b/Doxyfile.in @@ -1,4 +1,4 @@ -# Doxyfile 1.8.15 +# Doxyfile 1.8.20 # This file describes the settings to be used by the documentation system # 
doxygen (www.doxygen.org) for a project. @@ -187,6 +187,16 @@ SHORT_NAMES = NO JAVADOC_AUTOBRIEF = NO +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus @@ -207,6 +217,14 @@ QT_AUTOBRIEF = NO MULTILINE_CPP_IS_BRIEF = NO +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. +# The default value is: YES. + +PYTHON_DOCSTRING = YES + # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. @@ -243,12 +261,6 @@ TAB_SIZE = 8 ALIASES = -# This tag can be used to specify a number of word-keyword mappings (TCL only). -# A mapping has the form "name=value". For example adding "class=itcl::class" -# will allow you to use the command class in the itcl::class meaning. - -TCL_SUBST = - # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all @@ -289,14 +301,14 @@ OPTIMIZE_OUTPUT_SLICE = NO # parses. With this tag you can assign which parser to use for a given # extension. 
Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, # Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser # tries to guess whether the code is fixed or free formatted code, this is the -# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is -# Fortran), use: inc=Fortran f=C. +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # @@ -319,7 +331,7 @@ MARKDOWN_SUPPORT = YES # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. -# Minimum value: 0, maximum value: 99, default value: 0. +# Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 0 @@ -435,6 +447,19 @@ TYPEDEF_HIDES_STRUCT = NO LOOKUP_CACHE_SIZE = 0 +# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. 
At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which efficively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- @@ -455,6 +480,12 @@ EXTRACT_ALL = YES EXTRACT_PRIVATE = NO +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. @@ -509,8 +540,8 @@ HIDE_UNDOC_MEMBERS = NO HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO, these declarations will be -# included in the documentation. +# declarations. If set to NO, these declarations will be included in the +# documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO @@ -533,7 +564,7 @@ INTERNAL_DOCS = NO # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. +# (including Cygwin) and Mac users are advised to set this option to NO. # The default value is: system dependent. 
CASE_SENSE_NAMES = YES @@ -805,7 +836,7 @@ WARN_LOGFILE = INPUT = "@PROJECT_SOURCE_DIR@/src" \ "@PROJECT_SOURCE_DIR@/doc" \ - "@PROJECT_BINARY_DIR@/src/OpenShotVersion.h" + "@PROJECT_BINARY_DIR@/src/OpenShotVersion.h" # This tag can be used to specify the character encoding of the source files @@ -828,8 +859,10 @@ INPUT_ENCODING = UTF-8 # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, -# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, -# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen +# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = @@ -1043,6 +1076,38 @@ USE_HTAGS = NO VERBATIM_HEADERS = YES +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the +# cost of reduced performance. This can be particularly helpful with template +# rich C++ code for which doxygen's built-in parser lacks the necessary type +# information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. 
+ +CLANG_OPTIONS = + +# If clang assisted parsing is enabled you can provide the clang parser with the +# path to the directory containing a file called compile_commands.json. This +# file is the compilation database (see: +# http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the +# options used when the source files were built. This is equivalent to +# specifying the "-p" option to a clang tool, such as clang-check. These options +# will then be passed to the parser. Any options specified with CLANG_OPTIONS +# will be added as well. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. + +CLANG_DATABASE_PATH = + #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- @@ -1199,9 +1264,9 @@ HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that -# are dynamically created via Javascript. If disabled, the navigation index will +# are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML -# page. Disable this option to support browsers that do not have Javascript, +# page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1307,7 +1372,7 @@ CHM_FILE = HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated -# (YES) or that it should be included in the master .chm file (NO). +# (YES) or that it should be included in the main .chm file (NO). # The default value is: NO. 
# This tag requires that the tag GENERATE_HTMLHELP is set to YES. @@ -1352,7 +1417,7 @@ QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace -# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1360,7 +1425,7 @@ QHP_NAMESPACE = # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- +# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1369,7 +1434,7 @@ QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1377,7 +1442,7 @@ QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1385,7 +1450,7 @@ QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. 
Qt Help Project / Filter Attributes (see: -# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = @@ -1469,6 +1534,17 @@ TREEVIEW_WIDTH = 250 EXT_LINKS_IN_WINDOW = NO +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML @@ -1489,8 +1565,14 @@ FORMULA_FONTSIZE = 10 FORMULA_TRANSPARENT = YES +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# https://www.mathjax.org) which uses client side Javascript for the rendering +# https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. 
When # enabled you may also need to install MathJax separately and configure the path @@ -1518,7 +1600,7 @@ MATHJAX_FORMAT = HTML-CSS # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. -# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/. +# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest @@ -1560,7 +1642,7 @@ MATHJAX_CODEFILE = SEARCHENGINE = NO # When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a web server instead of a web client using Javascript. There +# implemented using a web server instead of a web client using JavaScript. There # are two flavors of web server based searching depending on the EXTERNAL_SEARCH # setting. When disabled, doxygen will generate a PHP script for searching and # an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing @@ -1664,10 +1746,11 @@ LATEX_CMD_NAME = "@LATEX_COMPILER@" MAKEINDEX_CMD_NAME = "@MAKEINDEX_COMPILER@" # The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to -# generate index for LaTeX. +# generate index for LaTeX. In case there is no backslash (\) as first character +# it will be automatically added in the LaTeX code. # Note: This tag is used in the generated output file (.tex). # See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. -# The default value is: \makeindex. +# The default value is: makeindex. # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_MAKEINDEX_CMD = \makeindex @@ -1756,9 +1839,11 @@ LATEX_EXTRA_FILES = PDF_HYPERLINKS = YES -# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate -# the PDF file directly from the LaTeX files. 
Set this option to YES, to get a -# higher quality PDF documentation. +# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as +# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX +# files. Set this option to YES, to get a higher quality PDF documentation. +# +# See also section LATEX_CMD_NAME for selecting the engine. # The default value is: YES. # This tag requires that the tag GENERATE_LATEX is set to YES. @@ -2092,7 +2177,9 @@ INCLUDE_FILE_PATTERNS = "*.h" # recursively expanded use the := operator instead of the = operator. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -PREDEFINED = USE_BLACKMAGIC USE_IMAGEMAGICK +PREDEFINED = USE_BLACKMAGIC \ + USE_IMAGEMAGICK \ + USE_OPENCV # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The @@ -2159,12 +2246,6 @@ EXTERNAL_GROUPS = YES EXTERNAL_PAGES = YES -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of 'which perl'). -# The default file (with absolute path) is: /usr/bin/perl. - -PERL_PATH = /usr/bin/perl - #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- @@ -2178,15 +2259,6 @@ PERL_PATH = /usr/bin/perl CLASS_DIAGRAMS = YES -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see: -# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - # You can include diagrams made with dia in doxygen documentation. 
Doxygen will # then run dia to produce the diagram and insert it in the documentation. The # DIA_PATH tag allows you to specify the directory where the dia binary resides. diff --git a/README.md b/README.md index a03ce11ec..f255e8984 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ solutions to the world. ## Build Status -[![Build Status](https://img.shields.io/travis/OpenShot/libopenshot/develop.svg?label=libopenshot)](https://travis-ci.org/OpenShot/libopenshot) [![Build Status](https://img.shields.io/travis/OpenShot/libopenshot-audio/develop.svg?label=libopenshot-audio)](https://travis-ci.org/OpenShot/libopenshot-audio) +[![libopenshot CI Build](https://github.com/OpenShot/libopenshot/actions/workflows/ci.yml/badge.svg)](https://github.com/OpenShot/libopenshot/actions/workflows/ci.yml) [![libopenshot-audio CI Build](https://github.com/OpenShot/libopenshot-audio/actions/workflows/ci.yml/badge.svg)](https://github.com/OpenShot/libopenshot-audio/actions/workflows/ci.yml) ## Features diff --git a/examples/Example.cpp b/examples/Example.cpp index a11bb5aba..52a307434 100644 --- a/examples/Example.cpp +++ b/examples/Example.cpp @@ -31,8 +31,10 @@ #include #include #include -#include "OpenShot.h" -#include "CrashHandler.h" +#include "Clip.h" +#include "Frame.h" +#include "FFmpegReader.h" +#include "Timeline.h" using namespace openshot; @@ -53,16 +55,17 @@ int main(int argc, char* argv[]) { const auto time1 = std::chrono::high_resolution_clock::now(); std::shared_ptr f = r9.GetFrame(frame); const auto time2 = std::chrono::high_resolution_clock::now(); - std::cout << "FFmpegReader: " << frame << " (" << double_ms(time2 - time1).count() << " ms)" << std::endl; + std::cout << "FFmpegReader: " << frame + << " (" << double_ms(time2 - time1).count() << " ms)\n"; } const auto total_2 = std::chrono::high_resolution_clock::now(); auto total_sec = std::chrono::duration_cast(total_2 - total_1); - std::cout << "FFmpegReader TOTAL: " << total_sec.count() << " ms" << std::endl; 
+ std::cout << "FFmpegReader TOTAL: " << total_sec.count() << " ms\n"; r9.Close(); // Timeline Reader performance test - Timeline tm(r9.info.width, r9.info.height, r9.info.fps, r9.info.sample_rate, r9.info.channels, r9.info.channel_layout); + Timeline tm(r9.info); Clip *c = new Clip(&r9); tm.AddClip(c); tm.Open(); @@ -73,14 +76,15 @@ int main(int argc, char* argv[]) { const auto time1 = std::chrono::high_resolution_clock::now(); std::shared_ptr f = tm.GetFrame(frame); const auto time2 = std::chrono::high_resolution_clock::now(); - std::cout << "Timeline: " << frame << " (" << double_ms(time2 - time1).count() << " ms)" << std::endl; + std::cout << "Timeline: " << frame + << " (" << double_ms(time2 - time1).count() << " ms)\n"; } const auto total_4 = std::chrono::high_resolution_clock::now(); total_sec = std::chrono::duration_cast(total_4 - total_3); - std::cout << "Timeline TOTAL: " << total_sec.count() << " ms" << std::endl; + std::cout << "Timeline TOTAL: " << total_sec.count() << " ms\n"; tm.Close(); - std::cout << "Completed successfully!" << std::endl; + std::cout << "Completed successfully!\n"; return 0; } diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 387ab59e8..ced0bd1a4 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -455,9 +455,6 @@ endif() if(UNIX AND NOT APPLE) set(CPACK_GENERATOR "DEB") endif() -#if(UNIX AND APPLE) -# set(CPACK_GENERATOR "DragNDrop") -#endif() set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Jonathan Thomas") #required include(CPack) diff --git a/src/CVObjectDetection.cpp b/src/CVObjectDetection.cpp index b262f8c51..53533cbff 100644 --- a/src/CVObjectDetection.cpp +++ b/src/CVObjectDetection.cpp @@ -488,10 +488,6 @@ bool CVObjectDetection::_LoadObjDetectdData(){ detectionsData[id] = CVDetectionData(classIds, confidences, boxes, id, objectIds); } - // Show the time stamp from the last update in object detector data file - if (objMessage.has_last_updated()) - cout << " Loaded Data. 
Saved Time Stamp: " << TimeUtil::ToString(objMessage.last_updated()) << endl; - // Delete all global objects allocated by libprotobuf. google::protobuf::ShutdownProtobufLibrary(); diff --git a/src/CVObjectDetection.h b/src/CVObjectDetection.h index 3ec802172..ba62be956 100644 --- a/src/CVObjectDetection.h +++ b/src/CVObjectDetection.h @@ -130,7 +130,7 @@ namespace openshot // Add frame object detection data into protobuf message. void AddFrameDataToProto(pb_objdetect::Frame* pbFrameData, CVDetectionData& dData); - /// Get and Set JSON methods + // Get and Set JSON methods void SetJson(const std::string value); ///< Load JSON string into this object void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object diff --git a/src/CVStabilization.cpp b/src/CVStabilization.cpp index 864f9b583..3076ada8b 100644 --- a/src/CVStabilization.cpp +++ b/src/CVStabilization.cpp @@ -90,9 +90,6 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t // Update progress processingController->SetProgress(uint(100*(frame_number-start)/(end-start))); } - // Show average and max transformation parameters - std::cout<<"\nAVERAGE DX: "< trajectory = ComputeFramesTrajectory(); @@ -194,9 +191,6 @@ bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){ prev_to_cur_transform.push_back(TransformParam(dx, dy, da)); frame.copyTo(prev_grey); - // Show processing info - cout << "Frame: " << frameNum << " - good optical flow: " << prev_corner2.size() << endl; - return true; } @@ -423,11 +417,6 @@ bool CVStabilization::_LoadStabilizedData(){ transformationData[id] = TransformParam(dx,dy,da); } - // Show the time stamp from the last update in stabilization data file - if (stabilizationMessage.has_last_updated()) { - cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(stabilizationMessage.last_updated()) << endl; - } - // Delete all global objects allocated by libprotobuf. 
google::protobuf::ShutdownProtobufLibrary(); diff --git a/src/CVStabilization.h b/src/CVStabilization.h index 7c41ac7c0..96cbf7125 100644 --- a/src/CVStabilization.h +++ b/src/CVStabilization.h @@ -101,13 +101,13 @@ class CVStabilization { /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; - // Track current frame features and find the relative transformation + /// Track current frame features and find the relative transformation bool TrackFrameFeatures(cv::Mat frame, size_t frameNum); std::vector ComputeFramesTrajectory(); std::map SmoothTrajectory(std::vector &trajectory); - // Generate new transformations parameters for each frame to follow the smoothed trajectory + /// Generate new transformations parameters for each frame to follow the smoothed trajectory std::map GenNewCamPosition(std::map &smoothed_trajectory); public: @@ -115,23 +115,23 @@ class CVStabilization { std::map trajectoryData; // Save camera trajectory data std::map transformationData; // Save transormation data - // Set default smoothing window value to compute stabilization + /// Set default smoothing window value to compute stabilization CVStabilization(std::string processInfoJson, ProcessingController &processingController); - // Process clip and store necessary stabilization data + /// Process clip and store necessary stabilization data void stabilizeClip(openshot::Clip& video, size_t _start=0, size_t _end=0, bool process_interval=false); /// Protobuf Save and Load methods - // Save stabilization data to protobuf file + /// Save stabilization data to protobuf file bool SaveStabilizedData(); - // Add frame stabilization data into protobuf message + /// Add frame stabilization data into protobuf message void AddFrameDataToProto(pb_stabilize::Frame* pbFrameData, CamTrajectory& trajData, TransformParam& transData, size_t frame_number); // Return requested struct info for a given frame 
TransformParam GetTransformParamData(size_t frameId); CamTrajectory GetCamTrajectoryTrackedData(size_t frameId); - /// Get and Set JSON methods + // Get and Set JSON methods void SetJson(const std::string value); ///< Load JSON string into this object void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object diff --git a/src/CVTracker.cpp b/src/CVTracker.cpp index 5eab0d4cf..ee29894ae 100644 --- a/src/CVTracker.cpp +++ b/src/CVTracker.cpp @@ -366,11 +366,6 @@ bool CVTracker::_LoadTrackedData(){ trackedDataById[id] = FrameData(id, rotation, x1, y1, x2, y2); } - // Show the time stamp from the last update in tracker data file - if (trackerMessage.has_last_updated()) { - cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(trackerMessage.last_updated()) << endl; - } - // Delete all global objects allocated by libprotobuf. google::protobuf::ShutdownProtobufLibrary(); diff --git a/src/CVTracker.h b/src/CVTracker.h index f5a090cf6..4ac28df53 100644 --- a/src/CVTracker.h +++ b/src/CVTracker.h @@ -116,24 +116,27 @@ namespace openshot // Constructor CVTracker(std::string processInfoJson, ProcessingController &processingController); - // Set desirable tracker method + /// Set desirable tracker method cv::Ptr selectTracker(std::string trackerType); - // Track object in the hole clip or in a given interval - // If start, end and process_interval are passed as argument, clip will be processed in [start,end) + /// Track object in the hole clip or in a given interval + /// + /// If start, end and process_interval are passed as argument, clip will be processed in [start,end) void trackClip(openshot::Clip& video, size_t _start=0, size_t _end=0, bool process_interval=false); - // Filter current bounding box jitter + + /// Filter current bounding box jitter cv::Rect2d filter_box_jitter(size_t frameId); - // Get tracked data for a given frame + + /// Get tracked data for a given frame FrameData GetTrackedData(size_t frameId); - /// Protobuf Save and 
Load methods - // Save protobuf file + // Protobuf Save and Load methods + /// Save protobuf file bool SaveTrackedData(); - // Add frame tracked data into protobuf message. + /// Add frame tracked data into protobuf message. void AddFrameDataToProto(pb_tracker::Frame* pbFrameData, FrameData& fData); - /// Get and Set JSON methods + // Get and Set JSON methods void SetJson(const std::string value); ///< Load JSON string into this object void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object diff --git a/src/CacheBase.h b/src/CacheBase.h index 0033c8825..b0a64f459 100644 --- a/src/CacheBase.h +++ b/src/CacheBase.h @@ -107,7 +107,7 @@ namespace openshot { /// @param channels The number of audio channels in the frame void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels); - /// Get and Set JSON methods + // Get and Set JSON methods virtual std::string Json() = 0; ///< Generate JSON string of this object virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object virtual Json::Value JsonValue() = 0; ///< Generate Json::Value for this object diff --git a/src/CacheDisk.h b/src/CacheDisk.h index ef63686b4..764497b75 100644 --- a/src/CacheDisk.h +++ b/src/CacheDisk.h @@ -124,7 +124,7 @@ namespace openshot { /// @param end_frame_number The ending frame number of the cached frame void Remove(int64_t start_frame_number, int64_t end_frame_number); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json(); ///< Generate JSON string of this object void SetJson(const std::string value); ///< Load JSON string into this object Json::Value JsonValue(); ///< Generate Json::Value for this object diff --git a/src/CacheMemory.h b/src/CacheMemory.h index e5402ad9c..c693cb664 100644 --- a/src/CacheMemory.h +++ b/src/CacheMemory.h @@ -108,7 +108,7 @@ namespace openshot { /// @param end_frame_number The ending frame number of the cached frame void Remove(int64_t 
start_frame_number, int64_t end_frame_number); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json(); ///< Generate JSON string of this object void SetJson(const std::string value); ///< Load JSON string into this object Json::Value JsonValue(); ///< Generate Json::Value for this object diff --git a/src/ChunkReader.h b/src/ChunkReader.h index 7a041f714..775e20d28 100644 --- a/src/ChunkReader.h +++ b/src/ChunkReader.h @@ -150,7 +150,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "ChunkReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/Clip.h b/src/Clip.h index 7fff971bb..d15991e9f 100644 --- a/src/Clip.h +++ b/src/Clip.h @@ -37,13 +37,14 @@ #include #include #undef uint64 - #undef int64 - + #undef int64 + #endif #include #include #include + #include "AudioResampler.h" #include "ClipBase.h" #include "Color.h" @@ -121,12 +122,13 @@ namespace openshot { private: bool waveform; ///< Should a waveform be used instead of the clip's image - std::list effects; /// Is Reader opened + std::list effects; ///< List of clips on this timeline + bool is_open; ///< Is Reader opened std::string parentObjectId; ///< Id of the bounding box that this clip is attached to std::shared_ptr parentTrackedObject; ///< Tracked object this clip is attached to openshot::Clip* parentClipObject; ///< Clip object this clip is attached to + // Audio resampler (if time mapping) openshot::AudioResampler *resampler; @@ -262,11 +264,11 @@ namespace openshot { /// Get the current reader openshot::ReaderBase* Reader(); - /// Override End() method + // Override End() method float End() const; ///< Get end position (in seconds) of clip (trim end of 
video), which can be affected by the time curve. void End(float value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object @@ -280,7 +282,7 @@ namespace openshot { /// @param effect Remove an effect from the clip. void RemoveEffect(openshot::EffectBase* effect); - /// Waveform property + // Waveform property bool Waveform() { return waveform; } ///< Get the waveform property of this clip void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip @@ -315,11 +317,11 @@ namespace openshot { openshot::Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4 openshot::Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4 - /// Audio channel filter and mappings + // Audio channel filter and mappings openshot::Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels) openshot::Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel) - /// Override has_video and has_audio properties of clip (and their readers) + // Override has_video and has_audio properties of clip (and their readers) openshot::Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes) openshot::Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes) }; diff --git a/src/ClipBase.h b/src/ClipBase.h index 484b18e53..c38b9790a 100644 --- a/src/ClipBase.h +++ b/src/ClipBase.h @@ -103,7 +103,7 @@ namespace openshot { /// @param frame_number The frame number (starting at 1) of the clip or effect on the 
timeline. virtual std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) = 0; - /// Get basic properties + // Get basic properties std::string Id() const { return id; } ///< Get the Id of this clip object float Position() const { return position; } ///< Get position on timeline (in seconds) int Layer() const { return layer; } ///< Get layer of clip on timeline (lower number is covered by higher numbers) @@ -112,7 +112,7 @@ namespace openshot { float Duration() const { return end - start; } ///< Get the length of this clip (in seconds) openshot::TimelineBase* ParentTimeline() { return timeline; } ///< Get the associated Timeline pointer (if any) - /// Set basic properties + // Set basic properties void Id(std::string value) { id = value; } ///> Set the Id of this clip object void Position(float value) { position = value; } ///< Set position on timeline (in seconds) void Layer(int value) { layer = value; } ///< Set layer of clip on timeline (lower number is covered by higher numbers) @@ -120,7 +120,7 @@ namespace openshot { void End(float value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) void ParentTimeline(openshot::TimelineBase* new_timeline) { timeline = new_timeline; } ///< Set associated Timeline pointer - /// Get and Set JSON methods + // Get and Set JSON methods virtual std::string Json() const = 0; ///< Generate JSON string of this object virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object diff --git a/src/Color.h b/src/Color.h index 564af4ef9..8b7eaae24 100644 --- a/src/Color.h +++ b/src/Color.h @@ -71,7 +71,7 @@ namespace openshot { /// Get the distance between 2 RGB pairs. 
(0=identical colors, 10=very close colors, 760=very different colors) static long GetDistance(long R1, long G1, long B1, long R2, long G2, long B2); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const; ///< Generate JSON string of this object Json::Value JsonValue() const; ///< Generate Json::Value for this object void SetJson(const std::string value); ///< Load JSON string into this object diff --git a/src/Coordinate.h b/src/Coordinate.h index 35fa966a4..0a3ba9785 100644 --- a/src/Coordinate.h +++ b/src/Coordinate.h @@ -68,7 +68,7 @@ namespace openshot { /// @param co A std::pair tuple containing (X, Y) Coordinate(const std::pair& co); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const; ///< Generate JSON string of this object Json::Value JsonValue() const; ///< Generate Json::Value for this object void SetJson(const std::string value); ///< Load JSON string into this object diff --git a/src/CrashHandler.h b/src/CrashHandler.h index 76d788f29..2ee200247 100644 --- a/src/CrashHandler.h +++ b/src/CrashHandler.h @@ -63,7 +63,6 @@ namespace openshot { CrashHandler(CrashHandler const&) = delete; // Don't allow the user to copy this instance /// Default assignment operator - //CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance CrashHandler & operator=(CrashHandler const&) = delete; // Don't allow the user to assign this instance /// Private variable to keep track of singleton instance diff --git a/src/DecklinkReader.h b/src/DecklinkReader.h index 6008e5eb7..1a54d2f9b 100644 --- a/src/DecklinkReader.h +++ b/src/DecklinkReader.h @@ -116,7 +116,7 @@ namespace openshot /// Return the type name of the class std::string Name() { return "DecklinkReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value); ///< Load JSON string into this 
object Json::Value JsonValue() const; ///< Generate Json::Value for this object diff --git a/src/DummyReader.h b/src/DummyReader.h index 280ed171f..8464f8d5f 100644 --- a/src/DummyReader.h +++ b/src/DummyReader.h @@ -142,7 +142,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "DummyReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/EffectBase.h b/src/EffectBase.h index 8b45442b9..6e7418506 100644 --- a/src/EffectBase.h +++ b/src/EffectBase.h @@ -80,7 +80,7 @@ namespace openshot EffectBase* parentEffect; /// Map of Tracked Object's by their indices (used by Effects that track objects on clips) - std::map > trackedObjects; + std::map > trackedObjects; /// Information about the current effect EffectInfoStruct info; @@ -100,7 +100,7 @@ namespace openshot /// Set parent clip object of this effect void ParentClip(openshot::ClipBase* new_clip); - + /// Set the parent effect from which this properties will be set to void SetParentEffect(std::string parentEffect_id); @@ -110,7 +110,7 @@ namespace openshot /// Get the indexes and IDs of all visible objects in the given frame virtual std::string GetVisibleObjects(int64_t frame_number) const {return {}; }; - /// Get and Set JSON methods + // Get and Set JSON methods virtual std::string Json() const = 0; ///< Generate JSON string of this object virtual void SetJson(std::string value) = 0; ///< Load JSON string into this object virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object @@ -122,7 +122,7 @@ namespace openshot virtual void SetJson(int64_t requested_frame, const std::string value) { return; }; - + Json::Value JsonInfo() const; ///< Generate JSON object of meta 
data / info /// Get the order that this effect should be executed. diff --git a/src/EffectInfo.h b/src/EffectInfo.h index e3e20b47c..8d4870964 100644 --- a/src/EffectInfo.h +++ b/src/EffectInfo.h @@ -47,10 +47,10 @@ namespace openshot class EffectInfo { public: - // Create an instance of an effect (factory style) + /// Create an instance of an effect (factory style) EffectBase* CreateEffect(std::string effect_type); - /// JSON methods + // JSON methods static std::string Json(); ///< Generate JSON string of this object static Json::Value JsonValue(); ///< Generate Json::Value for this object diff --git a/src/Exceptions.h b/src/Exceptions.h index 46137d532..de0ba64ea 100644 --- a/src/Exceptions.h +++ b/src/Exceptions.h @@ -41,7 +41,7 @@ namespace openshot { * A std::exception-derived exception class with custom message. * All OpenShot exception classes inherit from this class. */ - class ExceptionBase : public std::exception //: public exception + class ExceptionBase : public std::exception { protected: std::string m_message; @@ -370,7 +370,7 @@ namespace openshot { #ifndef SWIG /// Exception when too many seek attempts happen class - __attribute__ ((deprecated (TMS_DEP_MSG) )) + __attribute__ ((deprecated(TMS_DEP_MSG))) TooManySeeks : public ExceptionBase { public: @@ -381,11 +381,7 @@ namespace openshot { * @param message A message to accompany the exception * @param file_path (optional) The input file being processed */ - TooManySeeks(std::string message, std::string file_path="") -#ifndef SWIG - __attribute__ ((deprecated (TMS_DEP_MSG) )) -#endif - : ExceptionBase(message), file_path(file_path) { } + TooManySeeks(std::string message, std::string file_path="") __attribute__ ((deprecated(TMS_DEP_MSG))); virtual ~TooManySeeks() noexcept {} }; #endif diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index a64e52303..e4eaab2c8 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -1243,13 +1243,13 @@ void FFmpegReader::ProcessVideoPacket(int64_t 
requested_frame) { processing_video_frames[current_frame] = current_frame; // Create variables for a RGB Frame (since most videos are not in RGB, we must convert it) - AVFrame *pFrameRGB = NULL; - uint8_t *buffer = NULL; + AVFrame *pFrameRGB = nullptr; + uint8_t *buffer = nullptr; // Allocate an AVFrame structure pFrameRGB = AV_ALLOCATE_FRAME(); - if (pFrameRGB == NULL) - throw OutOfBoundsFrame("Convert Image Broke!", current_frame, video_length); + if (pFrameRGB == nullptr) + throw OutOfMemory("Failed to allocate frame buffer", path); // Determine the max size of this source image (based on the timeline's size, the scaling mode, // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, diff --git a/src/FFmpegReader.h b/src/FFmpegReader.h index b1758dd36..312dfc8ef 100644 --- a/src/FFmpegReader.h +++ b/src/FFmpegReader.h @@ -265,7 +265,7 @@ namespace openshot { /// Return the type name of the class std::string Name() override { return "FFmpegReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 0cc4daa2d..b3965ff66 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1669,10 +1669,8 @@ void FFmpegWriter::write_audio_packets(bool is_final) { av_opt_set_int(avr, "out_channels", info.channels, 0); SWR_INIT(avr); } - int nb_samples = 0; - // Convert audio samples - nb_samples = SWR_CONVERT( + int nb_samples = SWR_CONVERT( avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) @@ -1683,7 +1681,7 @@ void FFmpegWriter::write_audio_packets(bool is_final) { ); // Set remaining samples - remaining_frame_samples = nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16); + remaining_frame_samples = total_frame_samples; // Create a new array (to hold all resampled S16 audio samples) all_resampled_samples = (int16_t *) av_malloc( diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp index 98b0c112c..0e3b0272e 100644 --- a/src/FrameMapper.cpp +++ b/src/FrameMapper.cpp @@ -36,7 +36,7 @@ using namespace std; using namespace openshot; FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout) : - reader(reader), target(target), pulldown(target_pulldown), is_dirty(true), avr(NULL) + reader(reader), target(target), pulldown(target_pulldown), is_dirty(true), avr(NULL), parent_position(0.0) { // Set the original frame rate from the reader original = Fraction(reader->info.fps.num, reader->info.fps.den); @@ -112,6 +112,16 @@ void FrameMapper::Init() fields.clear(); frames.clear(); + // Find parent position (if any) + Clip *parent = (Clip *) ParentClip(); + if (parent) { + parent_position = parent->Position(); + parent_start = parent->Start(); + } else { + parent_position = 0.0; + parent_start = 0.0; + } + // Mark as not dirty is_dirty = false; @@ -263,8 +273,9 @@ void FrameMapper::Init() while (remaining_samples > 0) { - // get original samples - int original_samples = Frame::GetSamplesPerFrame(AdjustFrameNumber(end_samples_frame), original, reader->info.sample_rate, reader->info.channels) - end_samples_position; + // Get original samples (with NO framerate adjustments) + // This is the original reader's frame numbers + int original_samples = Frame::GetSamplesPerFrame(end_samples_frame, original, reader->info.sample_rate, reader->info.channels) - end_samples_position; // Enough samples if (original_samples >= remaining_samples) 
@@ -395,9 +406,20 @@ std::shared_ptr FrameMapper::GetFrame(int64_t requested_frame) // Create a scoped lock, allowing only a single thread to run the following code at one time const GenericScopedLock lock(getFrameCriticalSection); - // Check if mappings are dirty (and need to be recalculated) + // Find parent properties (if any) + Clip *parent = (Clip *) ParentClip(); + if (parent) { + float position = parent->Position(); + float start = parent->Start(); + if (parent_position != position || parent_start != start) { + // Force dirty if parent clip has moved or been trimmed + // since this heavily affects frame #s and audio mappings + is_dirty = true; + } + } + + // Check if mappings are dirty (and need to be recalculated) if (is_dirty) - // Recalculate mappings Init(); // Check final cache a 2nd time (due to potential lock already generating this frame) diff --git a/src/FrameMapper.h b/src/FrameMapper.h index 09f198992..62615cfbe 100644 --- a/src/FrameMapper.h +++ b/src/FrameMapper.h @@ -144,6 +144,8 @@ namespace openshot ReaderBase *reader; // The source video reader CacheMemory final_cache; // Cache of actual Frame objects bool is_dirty; // When this is true, the next call to GetFrame will re-init the mapping + float parent_position; // Position of parent clip (which is used to generate the audio mapping) + float parent_start; // Start of parent clip (which is used to generate the audio mapping) SWRCONTEXT *avr; // Audio resampling context object // Internal methods used by init @@ -199,7 +201,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "FrameMapper"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/ImageReader.h b/src/ImageReader.h 
index 39c505dc8..a6ab10278 100644 --- a/src/ImageReader.h +++ b/src/ImageReader.h @@ -107,7 +107,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "ImageReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/KeyFrame.h b/src/KeyFrame.h index 14dee7bf0..6da34cace 100644 --- a/src/KeyFrame.h +++ b/src/KeyFrame.h @@ -140,7 +140,7 @@ namespace openshot { /// Get the direction of the curve at a specific index (increasing or decreasing) bool IsIncreasing(int index) const; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const; ///< Generate JSON string of this object Json::Value JsonValue() const; ///< Generate Json::Value for this object void SetJson(const std::string value); ///< Load JSON string into this object diff --git a/src/Point.h b/src/Point.h index 8df34aa93..1795c4692 100644 --- a/src/Point.h +++ b/src/Point.h @@ -118,7 +118,7 @@ namespace openshot /// Set the right handle to a percent of the primary coordinate (0 to 1) void Initialize_RightHandle(float x, float y); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const; ///< Generate JSON string of this object Json::Value JsonValue() const; ///< Generate Json::Value for this object void SetJson(const std::string value); ///< Load JSON string into this object diff --git a/src/Profiles.h b/src/Profiles.h index 9d8c6d3b0..383051e07 100644 --- a/src/Profiles.h +++ b/src/Profiles.h @@ -88,7 +88,7 @@ namespace openshot /// @param path The folder path / location of a profile file Profile(std::string path); - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const; ///< Generate JSON string of this object 
Json::Value JsonValue() const; ///< Generate Json::Value for this object void SetJson(const std::string value); ///< Load JSON string into this object diff --git a/src/QtHtmlReader.h b/src/QtHtmlReader.h index 6564477d6..fa2008432 100644 --- a/src/QtHtmlReader.h +++ b/src/QtHtmlReader.h @@ -132,7 +132,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "QtHtmlReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/QtImageReader.h b/src/QtImageReader.h index a489168f0..0dc359b15 100644 --- a/src/QtImageReader.h +++ b/src/QtImageReader.h @@ -114,7 +114,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "QtImageReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/QtTextReader.h b/src/QtTextReader.h index bd6a6a538..db4c73fbb 100644 --- a/src/QtTextReader.h +++ b/src/QtTextReader.h @@ -143,7 +143,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "QtTextReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/ReaderBase.h b/src/ReaderBase.h index e2d93841b..7b7847a8d 100644 --- a/src/ReaderBase.h +++ 
b/src/ReaderBase.h @@ -139,7 +139,7 @@ namespace openshot /// Return the type name of the class virtual std::string Name() = 0; - /// Get and Set JSON methods + // Get and Set JSON methods virtual std::string Json() const = 0; ///< Generate JSON string of this object virtual void SetJson(const std::string value) = 0; ///< Load JSON string into this object virtual Json::Value JsonValue() const = 0; ///< Generate Json::Value for this object diff --git a/src/TextReader.h b/src/TextReader.h index 2a9fb92a4..4a440a8a3 100644 --- a/src/TextReader.h +++ b/src/TextReader.h @@ -141,7 +141,7 @@ namespace openshot /// Return the type name of the class std::string Name() override { return "TextReader"; }; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/Timeline.cpp b/src/Timeline.cpp index 1bf915e3e..ac6c8f0a3 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -29,8 +29,17 @@ */ #include "Timeline.h" + +#include "CacheBase.h" +#include "CacheDisk.h" +#include "CacheMemory.h" +#include "CrashHandler.h" +#include "FrameMapper.h" #include "Exceptions.h" +#include +#include + using namespace openshot; // Default Constructor for the timeline (which sets the canvas width and height) @@ -78,6 +87,11 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha SetMaxSize(info.width, info.height); } +// Delegating constructor that copies parameters from a provided ReaderInfo +Timeline::Timeline(const ReaderInfo info) : + Timeline::Timeline(info.width, info.height, info.fps, info.sample_rate, + info.channels, info.channel_layout) {}; + // Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline) Timeline::Timeline(const std::string& 
projectPath, bool convert_absolute_paths) : is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath), diff --git a/src/Timeline.h b/src/Timeline.h index 16ceda8ed..236e1352a 100644 --- a/src/Timeline.h +++ b/src/Timeline.h @@ -38,30 +38,27 @@ #include #include #include -#include "CacheBase.h" -#include "CacheDisk.h" -#include "CacheMemory.h" + +#include "TimelineBase.h" +#include "ReaderBase.h" + #include "Color.h" #include "Clip.h" -#include "CrashHandler.h" -#include "Point.h" #include "EffectBase.h" -#include "Effects.h" -#include "EffectInfo.h" #include "Fraction.h" #include "Frame.h" -#include "FrameMapper.h" #include "KeyFrame.h" #include "TrackedObjectBBox.h" #include "TrackedObjectBase.h" -#include "OpenMPUtilities.h" -#include "ReaderBase.h" -#include "Settings.h" -#include "TimelineBase.h" + namespace openshot { + // Forward decls + class FrameMapper; + class CacheBase; + /// Comparison method for sorting clip pointers (by Layer and then Position). Clips are sorted /// from lowest layer to top layer (since that is the sequence they need to be combined), and then /// by position (left to right). 
@@ -189,7 +186,7 @@ namespace openshot { /// Apply a FrameMapper to a clip which matches the settings of this timeline void apply_mapper_to_clip(openshot::Clip* clip); - /// Apply JSON Diffs to various objects contained in this timeline + // Apply JSON Diffs to various objects contained in this timeline void apply_json_to_clips(Json::Value change); /// GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Blur.h b/src/effects/Blur.h index b20cf258a..180f1ebdb 100644 --- a/src/effects/Blur.h +++ b/src/effects/Blur.h @@ -57,7 +57,7 @@ namespace openshot /// Init effect settings void init_effect_details(); - /// Internal blur methods (inspired and credited to http://blog.ivank.net/fastest-gaussian-blur.html) + // Internal blur methods (inspired and credited to http://blog.ivank.net/fastest-gaussian-blur.html) void boxBlurH(unsigned char *scl, unsigned char *tcl, int w, int h, int r); void boxBlurT(unsigned char *scl, unsigned char *tcl, int w, int h, int r); @@ -99,7 +99,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Brightness.h b/src/effects/Brightness.h index 8cf3cd0ac..7cb3b1461 100644 --- a/src/effects/Brightness.h +++ b/src/effects/Brightness.h @@ -88,7 +88,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Caption.h b/src/effects/Caption.h index 618977409..9fa869a3d 100644 --- a/src/effects/Caption.h +++ b/src/effects/Caption.h @@ -117,7 +117,7 @@ class Caption : public EffectBase std::string CaptionText(); ///< Set the caption string to use (see VTT format) void CaptionText(std::string new_caption_text); ///< Get the caption string - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/ChromaKey.cpp b/src/effects/ChromaKey.cpp index f66707e8b..2e562b141 100644 --- a/src/effects/ChromaKey.cpp +++ b/src/effects/ChromaKey.cpp @@ -83,18 +83,24 @@ std::shared_ptr ChromaKey::GetFrame(std::shared_ptrwidth() * image->height(); 
pixel++, byte_index+=4) { // Get the RGB values from the pixel - unsigned char R = pixels[byte_index]; - unsigned char G = pixels[byte_index + 1]; - unsigned char B = pixels[byte_index + 2]; - unsigned char A = pixels[byte_index + 3]; - - // Get distance between mask color and pixel color - long distance = Color::GetDistance((long)R, (long)G, (long)B, mask_R, mask_G, mask_B); - - // Alpha out the pixel (if color similar) - if (distance <= threshold) - // MATCHED - Make pixel transparent - pixels[byte_index + 3] = 0; + // Remove the premultiplied alpha values from R,G,B + float A = float(pixels[byte_index + 3]); + unsigned char R = (pixels[byte_index] / A) * 255.0; + unsigned char G = (pixels[byte_index + 1] / A) * 255.0; + unsigned char B = (pixels[byte_index + 2] / A) * 255.0; + + // Get distance between mask color and pixel color + long distance = Color::GetDistance((long)R, (long)G, (long)B, mask_R, mask_G, mask_B); + + if (distance <= threshold) { + // MATCHED - Make pixel transparent + // Due to premultiplied alpha, we must also zero out + // the individual color channels (or else artifacts are left behind) + pixels[byte_index] = 0; + pixels[byte_index + 1] = 0; + pixels[byte_index + 2] = 0; + pixels[byte_index + 3] = 0; + } } // return the modified frame @@ -168,7 +174,7 @@ std::string ChromaKey::PropertiesJSON(int64_t requested_frame) const { root["color"]["red"] = add_property_json("Red", color.red.GetValue(requested_frame), "float", "", &color.red, 0, 255, false, requested_frame); root["color"]["blue"] = add_property_json("Blue", color.blue.GetValue(requested_frame), "float", "", &color.blue, 0, 255, false, requested_frame); root["color"]["green"] = add_property_json("Green", color.green.GetValue(requested_frame), "float", "", &color.green, 0, 255, false, requested_frame); - root["fuzz"] = add_property_json("Fuzz", fuzz.GetValue(requested_frame), "float", "", &fuzz, 0, 25, false, requested_frame); + root["fuzz"] = add_property_json("Fuzz", 
fuzz.GetValue(requested_frame), "float", "", &fuzz, 0, 125, false, requested_frame); // Set the parent effect which properties this effect will inherit root["parent_effect_id"] = add_property_json("Parent", 0.0, "string", info.parent_effect_id, NULL, -1, -1, false, requested_frame); diff --git a/src/effects/ChromaKey.h b/src/effects/ChromaKey.h index 3116d1a92..f66f13091 100644 --- a/src/effects/ChromaKey.h +++ b/src/effects/ChromaKey.h @@ -91,7 +91,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/ColorShift.h b/src/effects/ColorShift.h index fc772cb5a..98c62674b 100644 --- a/src/effects/ColorShift.h +++ b/src/effects/ColorShift.h @@ -100,7 +100,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Crop.h b/src/effects/Crop.h index e3f60e133..f43f549c4 100644 --- a/src/effects/Crop.h +++ b/src/effects/Crop.h @@ -96,7 +96,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Deinterlace.h b/src/effects/Deinterlace.h index 6e5e8b1ee..5c1230c71 100644 --- a/src/effects/Deinterlace.h +++ b/src/effects/Deinterlace.h @@ -59,10 +59,10 @@ namespace openshot public: - /// Blank constructor, useful when using Json to load the effect properties + /// Default constructor, useful when using Json to load the effect properties Deinterlace(); - /// Default constructor + /// Constructor Deinterlace(bool isOdd); /// @brief This method is required for all derived classes of ClipBase, and returns a @@ -84,7 +84,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Hue.h b/src/effects/Hue.h index affeef0d2..e35c9ece0 100644 --- a/src/effects/Hue.h +++ b/src/effects/Hue.h @@ -60,10 +60,10 @@ namespace openshot public: Keyframe hue; ///< Shift the hue coordinates (left or right) - /// Blank constructor, useful when using Json to load the effect properties + /// Default constructor, useful when using Json to load the effect properties Hue(); - /// Default constructor, which takes 1 curve. The curves will shift the hue of the image. + /// Constructor which takes 1 curve. 
The curves will shift the hue of the image. /// /// @param hue The curve to adjust the hue shift (between 0 and 1) Hue(Keyframe hue); @@ -87,7 +87,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Mask.h b/src/effects/Mask.h index b2faefa2f..86d5a38bb 100644 --- a/src/effects/Mask.h +++ b/src/effects/Mask.h @@ -97,7 +97,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Negate.h b/src/effects/Negate.h index 47ff739c5..591a84fce 100644 --- a/src/effects/Negate.h +++ b/src/effects/Negate.h @@ -75,7 +75,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/ObjectDetection.h b/src/effects/ObjectDetection.h index 7d5a00093..fee9382bb 100644 --- a/src/effects/ObjectDetection.h +++ b/src/effects/ObjectDetection.h @@ -67,17 +67,17 @@ struct DetectionData{ namespace openshot { - /** - * @brief This effect displays all the detected objects on a clip. - */ - class ObjectDetection : public EffectBase - { - private: - std::string protobuf_data_path; - std::map detectionsData; - std::vector classNames; - - std::vector classesColor; + /** + * @brief This effect displays all the detected objects on a clip. + */ + class ObjectDetection : public EffectBase + { + private: + std::string protobuf_data_path; + std::map detectionsData; + std::vector classNames; + + std::vector classesColor; /// Minimum confidence value to display the detected objects float confidence_threshold = 0.5; @@ -94,49 +94,47 @@ namespace openshot void DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background); - public: - - /// Index of the Tracked Object that was selected to modify it's properties - int selectedObjectIndex; + public: + /// Index of the Tracked Object that was selected to modify it's properties + int selectedObjectIndex; - /// Blank constructor, useful when using Json to load the effect properties - ObjectDetection(std::string clipTrackerDataPath); + ObjectDetection(std::string clipTrackerDataPath); - /// Default constructor - ObjectDetection(); + /// Default constructor + ObjectDetection(); - /// @brief This method is required for all derived classes of EffectBase, and 
returns a - /// modified openshot::Frame object - /// - /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from its keyframes (starting at 1). - /// - /// @returns The modified openshot::Frame object - /// @param frame The frame object that needs the effect applied to it - /// @param frame_number The frame number (starting at 1) of the effect on the timeline. - std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from its keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
+ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } + std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::make_shared(), frame_number); } - /// Load protobuf data file + /// Load protobuf data file bool LoadObjDetectdData(std::string inputFilePath); - /// Get tracker info for the desired frame - DetectionData GetTrackedData(size_t frameId); + /// Get tracker info for the desired frame + DetectionData GetTrackedData(size_t frameId); - /// Get the indexes and IDs of all visible objects in the given frame - std::string GetVisibleObjects(int64_t frame_number) const override; + /// Get the indexes and IDs of all visible objects in the given frame + std::string GetVisibleObjects(int64_t frame_number) const override; - /// Get and Set JSON methods - std::string Json() const override; ///< Generate JSON string of this object - void SetJson(const std::string value) override; ///< Load JSON string into this object - Json::Value JsonValue() const override; ///< Generate Json::Value for this object - void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object + // Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value) override; ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object - /// Get all properties for a specific frame (perfect for a UI to display the current state - /// of all properties at any time) - std::string PropertiesJSON(int64_t requested_frame) const override; - }; + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + std::string 
PropertiesJSON(int64_t requested_frame) const override; + }; } diff --git a/src/effects/Pixelate.h b/src/effects/Pixelate.h index bfa5fcb9c..453c33eb8 100644 --- a/src/effects/Pixelate.h +++ b/src/effects/Pixelate.h @@ -63,10 +63,10 @@ namespace openshot Keyframe right; ///< Size of right margin Keyframe bottom; ///< Size of bottom margin - /// Blank constructor, useful when using Json to load the effect properties + /// Default constructor, useful when using Json to load the effect properties Pixelate(); - /// Default constructor, which takes 5 curves. These curves animate the pixelization effect over time. + /// Cnstructor which takes 5 curves. These curves animate the pixelization effect over time. /// /// @param pixelization The curve to adjust the amount of pixelization (0 to 1) /// @param left The curve to adjust the left margin size (between 0 and 1) @@ -94,7 +94,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Saturation.h b/src/effects/Saturation.h index 33928d106..cfa945fc0 100644 --- a/src/effects/Saturation.h +++ b/src/effects/Saturation.h @@ -99,7 +99,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Shift.h b/src/effects/Shift.h index e823334ab..d7b964161 100644 --- a/src/effects/Shift.h +++ b/src/effects/Shift.h @@ -90,7 +90,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/effects/Stabilizer.cpp b/src/effects/Stabilizer.cpp index 087959c26..e7fb3c029 100644 --- a/src/effects/Stabilizer.cpp +++ b/src/effects/Stabilizer.cpp @@ -156,11 +156,6 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){ transformationData[id] = EffectTransformParam(dx,dy,da); } - // Show the time stamp from the last update in stabilization data file - if (stabilizationMessage.has_last_updated()) { - cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(stabilizationMessage.last_updated()) << endl; - } - // Delete all global objects allocated by libprotobuf. 
google::protobuf::ShutdownProtobufLibrary(); diff --git a/src/effects/Stabilizer.h b/src/effects/Stabilizer.h index 4217958ec..73e8090ad 100644 --- a/src/effects/Stabilizer.h +++ b/src/effects/Stabilizer.h @@ -75,59 +75,57 @@ struct EffectCamTrajectory namespace openshot { - /** - * @brief This class stabilizes a video clip to remove undesired shaking and jitter. - * - * Adding stabilization is useful to increase video quality overall, since it removes - * from subtle to harsh unexpected camera movements. - */ - class Stabilizer : public EffectBase - { - private: - /// Init effect settings - void init_effect_details(); - std::string protobuf_data_path; - Keyframe zoom; - - public: - std::string teste; - std::map trajectoryData; // Save camera trajectory data - std::map transformationData; // Save transormation data - - /// Blank constructor, useful when using Json to load the effect properties - Stabilizer(std::string clipTrackerDataPath); - - /// Default constructor - Stabilizer(); - - /// @brief This method is required for all derived classes of EffectBase, and returns a - /// modified openshot::Frame object - /// - /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from its keyframes (starting at 1). - /// - /// @returns The modified openshot::Frame object - /// @param frame The frame object that needs the effect applied to it - /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
- std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - - std::shared_ptr GetFrame(int64_t frame_number) override { - return GetFrame(std::make_shared(), frame_number); - }; - - // Load protobuf data file - bool LoadStabilizedData(std::string inputFilePath); - - /// Get and Set JSON methods - std::string Json() const override; ///< Generate JSON string of this object - void SetJson(const std::string value) override; ///< Load JSON string into this object - Json::Value JsonValue() const override; ///< Generate Json::Value for this object - void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object - - /// Get all properties for a specific frame (perfect for a UI to display the current state - /// of all properties at any time) - std::string PropertiesJSON(int64_t requested_frame) const override; - }; + /** + * @brief This class stabilizes a video clip to remove undesired shaking and jitter. + * + * Adding stabilization is useful to increase video quality overall, since it removes + * from subtle to harsh unexpected camera movements. + */ + class Stabilizer : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + std::string protobuf_data_path; + Keyframe zoom; + + public: + std::string teste; + std::map trajectoryData; // Save camera trajectory data + std::map transformationData; // Save transormation data + + Stabilizer(); + + Stabilizer(std::string clipTrackerDataPath); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from its keyframes (starting at 1). 
+ /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. + std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; + + std::shared_ptr GetFrame(int64_t frame_number) override { + return GetFrame(std::make_shared(), frame_number); + }; + + /// Load protobuf data file + bool LoadStabilizedData(std::string inputFilePath); + + // Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value) override; ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + std::string PropertiesJSON(int64_t requested_frame) const override; + }; } diff --git a/src/effects/Tracker.cpp b/src/effects/Tracker.cpp index bf058ea72..52dc90796 100644 --- a/src/effects/Tracker.cpp +++ b/src/effects/Tracker.cpp @@ -356,4 +356,4 @@ std::string Tracker::PropertiesJSON(int64_t requested_frame) const { // Return formatted string return root.toStyledString(); -} \ No newline at end of file +} diff --git a/src/effects/Tracker.h b/src/effects/Tracker.h index 749ec1b4b..2faafcb27 100644 --- a/src/effects/Tracker.h +++ b/src/effects/Tracker.h @@ -47,59 +47,60 @@ using namespace std; namespace openshot { - /** - * @brief This class tracks a given object through the clip, draws a box around it and allow - * the user to attach another clip (image or video) to the tracked object. - * - * Tracking is useful to better visualize, follow the movement of an object through video - * and attach an image or video to it. 
- */ - class Tracker : public EffectBase - { - private: - /// Init effect settings - void init_effect_details(); - - Fraction BaseFPS; - double TimeScale; - - public: + /** + * @brief This class tracks a given object through the clip, draws a box around it and allow + * the user to attach another clip (image or video) to the tracked object. + * + * Tracking is useful to better visualize, follow the movement of an object through video + * and attach an image or video to it. + */ + class Tracker : public EffectBase + { + private: + /// Init effect settings + void init_effect_details(); + + Fraction BaseFPS; + double TimeScale; + + public: std::string protobuf_data_path; ///< Path to the protobuf file that holds the bounding-box data std::shared_ptr trackedData; ///< Pointer to an object that holds the bounding-box data and it's Keyframes /// Blank constructor, useful when using Json to load the effect properties Tracker(std::string clipTrackerDataPath); - /// Default constructor - Tracker(); - - /// @brief This method is required for all derived classes of EffectBase, and returns a - /// modified openshot::Frame object - /// - /// The frame object is passed into this method, and a frame_number is passed in which - /// tells the effect which settings to use from its keyframes (starting at 1). - /// - /// @returns The modified openshot::Frame object - /// @param frame The frame object that needs the effect applied to it - /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
- std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } - - /// Get the indexes and IDs of all visible objects in the given frame - std::string GetVisibleObjects(int64_t frame_number) const override; - - void DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background); - - /// Get and Set JSON methods - std::string Json() const override; ///< Generate JSON string of this object - void SetJson(const std::string value) override; ///< Load JSON string into this object - Json::Value JsonValue() const override; ///< Generate Json::Value for this object - void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object - - /// Get all properties for a specific frame (perfect for a UI to display the current state - /// of all properties at any time) - std::string PropertiesJSON(int64_t requested_frame) const override; - }; + /// Default constructor + Tracker(); + + /// @brief This method is required for all derived classes of EffectBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method, and a frame_number is passed in which + /// tells the effect which settings to use from its keyframes (starting at 1). + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the effect applied to it + /// @param frame_number The frame number (starting at 1) of the effect on the timeline. 
+ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; + std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } + + /// Get the indexes and IDs of all visible objects in the given frame + std::string GetVisibleObjects(int64_t frame_number) const override; + + void DrawRectangleRGBA(cv::Mat &frame_image, cv::RotatedRect box, std::vector color, float alpha, int thickness, bool is_background); + + // Get and Set JSON methods + std::string Json() const override; ///< Generate JSON string of this object + void SetJson(const std::string value) override; ///< Load JSON string into this object + Json::Value JsonValue() const override; ///< Generate Json::Value for this object + void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object + + /// Get all properties for a specific frame (perfect for a UI to display the current state + /// of all properties at any time) + std::string PropertiesJSON(int64_t requested_frame) const override; + }; + } #endif diff --git a/src/effects/Wave.h b/src/effects/Wave.h index 25356b5a8..8ccd815ad 100644 --- a/src/effects/Wave.h +++ b/src/effects/Wave.h @@ -65,10 +65,10 @@ namespace openshot Keyframe shift_x; ///< Amount to shift X-axis Keyframe speed_y; ///< Speed of the wave on the Y-axis - /// Blank constructor, useful when using Json to load the effect properties + /// Default constructor, useful when using Json to load the effect properties Wave(); - /// Default constructor, which takes 5 curves. The curves will distort the image. + /// Constructor which takes 5 curves. The curves will distort the image. /// /// @param wavelength The curve to adjust the wavelength (0.0 to 3.0) /// @param amplitude The curve to adjust the amplitude (0.0 to 5.0) @@ -96,7 +96,7 @@ namespace openshot /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override; - /// Get and Set JSON methods + // Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object void SetJson(const std::string value) override; ///< Load JSON string into this object Json::Value JsonValue() const override; ///< Generate Json::Value for this object diff --git a/src/sort_filter/KalmanTracker.h b/src/sort_filter/KalmanTracker.h index 648e704c8..496a74a26 100644 --- a/src/sort_filter/KalmanTracker.h +++ b/src/sort_filter/KalmanTracker.h @@ -7,13 +7,13 @@ #include "opencv2/video/tracking.hpp" #include "opencv2/highgui/highgui.hpp" - -#define StateType cv::Rect_ - -// This class represents the internel state of individual tracked objects observed as bounding box. -class KalmanTracker -{ -public: + +#define StateType cv::Rect_ + +/// This class represents the internel state of individual tracked objects observed as bounding box. +class KalmanTracker +{ +public: KalmanTracker() { init_kf(StateType()); diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index c649db9e9..f2c6326a5 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -26,24 +26,6 @@ # Test media path, used by unit tests for input data file(TO_NATIVE_PATH "${PROJECT_SOURCE_DIR}/examples/" TEST_MEDIA_PATH) -add_definitions( -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" ) - -################### UNITTEST++ ##################### -# Find UnitTest++ libraries (used for unit testing) -find_package(UnitTest++) - -if (NOT UnitTest++_FOUND) - set(TESTS_ENABLED OFF PARENT_SCOPE) - return() -endif() - -# Include UnitTest++ headers (needed for compile) -include_directories(${UnitTest++_INCLUDE_DIRS}) - -set_package_properties(UnitTest++ PROPERTIES - TYPE RECOMMENDED - PURPOSE "Unit testing framework") - ################# BLACKMAGIC DECKLINK ################### if(ENABLE_BLACKMAGIC) @@ -56,53 +38,68 @@ if(ENABLE_BLACKMAGIC) endif() endif() -############### SET TEST SOURCE FILES 
################# -set(OPENSHOT_TEST_FILES - Cache_Tests.cpp - Clip_Tests.cpp - Color_Tests.cpp - Coordinate_Tests.cpp - DummyReader_Tests.cpp - ReaderBase_Tests.cpp - ImageWriter_Tests.cpp - FFmpegReader_Tests.cpp - FFmpegWriter_Tests.cpp - Fraction_Tests.cpp - Frame_Tests.cpp - FrameMapper_Tests.cpp - KeyFrame_Tests.cpp - Point_Tests.cpp - QtImageReader_Tests.cpp - Settings_Tests.cpp - Timeline_Tests.cpp) +### +### TEST SOURCE FILES +### +set(OPENSHOT_TESTS + CacheDisk + CacheMemory + Clip + Color + Coordinate + DummyReader + ReaderBase + ImageWriter + FFmpegReader + FFmpegWriter + Fraction + Frame + FrameMapper + KeyFrame + Point + QtImageReader + Settings + Timeline +) -########## SET OPENCV RELATED TEST FILES ############### +### +### OPENCV RELATED TEST FILES +### if(ENABLE_OPENCV) - list(APPEND OPENSHOT_TEST_FILES - CVTracker_Tests.cpp - CVStabilizer_Tests.cpp - # CVObjectDetection_Tests.cpp + list(APPEND OPENSHOT_TESTS + CVTracker + CVStabilizer + # CVObjectDetection ) endif() -################ TESTER EXECUTABLE ################# -# Create unit test executable (openshot-test) -message (STATUS "Tests enabled, test executable will be built as tests/openshot-test") +### +### Catch2 unit tests +### +if (TESTS_ENABLED) + message (STATUS "Tests enabled, test executables will be compiled") + include(Catch) -add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} -) + include(CTest) -# Link libraries to the new executable -target_link_libraries(openshot-test - openshot - ${UnitTest++_LIBRARIES} -) + # Create object library for test executable main(), + # to avoid recompiling for every test + add_library(catch-main OBJECT catch_main.cpp) -##### RUNNING TESTS (make os_test / make test) ##### -# Hook up the 'make os_test' target to the 'openshot-test' executable, -# if we aren't defining it as the coverage target -if(NOT ENABLE_COVERAGE) - add_custom_target(os_test COMMAND openshot-test) + foreach(tname ${OPENSHOT_TESTS}) + 
add_executable(openshot-${tname}-test ${tname}.cpp $) + target_compile_definitions(openshot-${tname}-test PRIVATE + TEST_MEDIA_PATH="${TEST_MEDIA_PATH}" + ) + target_link_libraries(openshot-${tname}-test Catch2::Catch2 openshot) + # Automatically configure CTest targets from Catch2 test cases + catch_discover_tests( + openshot-${tname}-test + TEST_PREFIX ${tname}: + ) + list(APPEND CATCH2_TEST_TARGETS openshot-${tname}-test) + endforeach() + # Export target list for coverage use + set(UNIT_TEST_TARGETS ${CATCH2_TEST_TARGETS} PARENT_SCOPE) endif() + diff --git a/tests/CVObjectDetection.cpp b/tests/CVObjectDetection.cpp new file mode 100644 index 000000000..ce948074d --- /dev/null +++ b/tests/CVObjectDetection.cpp @@ -0,0 +1,132 @@ +/** + * @file + * @brief Unit tests for CVObjectDetection + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2020 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include +#include + +#include + +#include "Clip.h" +#include "CVObjectDetection.h" +#include "ProcessingController.h" +#include "Json.h" + +using namespace openshot; + +std::string effectInfo =(" {\"protobuf_data_path\": \"objdetector.data\", " + " \"processing_device\": \"GPU\", " + " \"model_configuration\": \"~/yolo/yolov3.cfg\", " + " \"model_weights\": \"~/yolo/yolov3.weights\", " + " \"classes_file\": \"~/yolo/obj.names\"} "); + +// Just for the stabilizer constructor, it won't be used +ProcessingController processingController; + +TEST_CASE( "DetectObject_Video", "[libopenshot][opencv][objectdetection]" ) +{ + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "run.mp4"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + //TODO remove hardcoded path + CVObjectDetection objectDetector(effectInfo, processingController); + + objectDetector.detectObjectsClip(c1, 0, 20, true); + + CVDetectionData dd = objectDetector.GetDetectionData(20); + + float x1 = dd.boxes.at(20).x; + float y1 = dd.boxes.at(20).y; + float x2 = x1 + dd.boxes.at(20).width; + float y2 = y1 + dd.boxes.at(20).height; + float confidence = dd.confidences.at(20); + int classId = dd.classIds.at(20); + + CHECK((int) (x1 * 720) == 106); + CHECK((int) (y1 * 400) == 21); + CHECK((int) (x2 * 720) == 628); + CHECK((int) (y2 * 400) == 429); + CHECK((int) (confidence * 1000) == 554); + CHECK(classId == 0); + +} + + +TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][objectdetection]" ) +{ + + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "run.mp4"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + //TODO remove hardcoded path + CVObjectDetection objectDetector_1(effectInfo ,processingController); + + objectDetector_1.detectObjectsClip(c1, 0, 20, true); + + CVDetectionData dd_1 = objectDetector_1.GetDetectionData(20); + + float x1_1 = dd_1.boxes.at(20).x; + float y1_1 = dd_1.boxes.at(20).y; + float x2_1 = 
x1_1 + dd_1.boxes.at(20).width; + float y2_1 = y1_1 + dd_1.boxes.at(20).height; + float confidence_1 = dd_1.confidences.at(20); + int classId_1 = dd_1.classIds.at(20); + + objectDetector_1.SaveObjDetectedData(); + + CVObjectDetection objectDetector_2(effectInfo, processingController); + + objectDetector_2._LoadObjDetectdData(); + + CVDetectionData dd_2 = objectDetector_2.GetDetectionData(20); + + float x1_2 = dd_2.boxes.at(20).x; + float y1_2 = dd_2.boxes.at(20).y; + float x2_2 = x1_2 + dd_2.boxes.at(20).width; + float y2_2 = y1_2 + dd_2.boxes.at(20).height; + float confidence_2 = dd_2.confidences.at(20); + int classId_2 = dd_2.classIds.at(20); + + CHECK((int) (x1_1 * 720) == (int) (x1_2 * 720)); + CHECK((int) (y1_1 * 400) == (int) (y1_2 * 400)); + CHECK((int) (x2_1 * 720) == (int) (x2_2 * 720)); + CHECK((int) (y2_1 * 400) == (int) (y2_2 * 400)); + CHECK((int) (confidence_1 * 1000) == (int) (confidence_2 * 1000)); + CHECK(classId_1 == classId_2); +} diff --git a/tests/CVObjectDetection_Tests.cpp b/tests/CVObjectDetection_Tests.cpp deleted file mode 100644 index e6c997a00..000000000 --- a/tests/CVObjectDetection_Tests.cpp +++ /dev/null @@ -1,140 +0,0 @@ -/** - * @file - * @brief Unit tests for openshot::Frame - * @author Jonathan Thomas - * @author FeRD (Frank Dana) - * - * @ref License - */ - -/* LICENSE - * - * Copyright (c) 2008-2019 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . - * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. 
- * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . - */ - -#include -#include - -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "Clip.h" -#include "CVObjectDetection.h" -#include "ProcessingController.h" -#include "Json.h" - -using namespace openshot; - -std::string effectInfo =(" {\"protobuf_data_path\": \"objdetector.data\", " - " \"processing_device\": \"GPU\", " - " \"model_configuration\": \"~/yolo/yolov3.cfg\", " - " \"model_weights\": \"~/yolo/yolov3.weights\", " - " \"classes_file\": \"~/yolo/obj.names\"} "); - -SUITE(CVObjectDetection_Tests) -{ - - // Just for the stabilizer constructor, it won't be used - ProcessingController processingController; - - TEST(DetectObject_Video) - { - // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "run.mp4"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - //TODO remove hardcoded path - CVObjectDetection objectDetector(effectInfo, processingController); - - objectDetector.detectObjectsClip(c1, 0, 20, true); - - CVDetectionData dd = objectDetector.GetDetectionData(20); - - float x1 = dd.boxes.at(20).x; - float y1 = dd.boxes.at(20).y; - float x2 = x1 + dd.boxes.at(20).width; - float y2 = y1 + dd.boxes.at(20).height; - float confidence = dd.confidences.at(20); - int classId = dd.classIds.at(20); - - CHECK_EQUAL((int) (x1 * 720), 106); - CHECK_EQUAL((int) (y1 * 400), 21); - CHECK_EQUAL((int) (x2 * 720), 628); - CHECK_EQUAL((int) (y2 * 400), 429); - CHECK_EQUAL((int) (confidence * 1000), 554); - CHECK_EQUAL(classId, 0); - - } - - - TEST(SaveLoad_Protobuf) - { - 
- // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "run.mp4"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - //TODO remove hardcoded path - CVObjectDetection objectDetector_1(effectInfo ,processingController); - - objectDetector_1.detectObjectsClip(c1, 0, 20, true); - - CVDetectionData dd_1 = objectDetector_1.GetDetectionData(20); - - float x1_1 = dd_1.boxes.at(20).x; - float y1_1 = dd_1.boxes.at(20).y; - float x2_1 = x1_1 + dd_1.boxes.at(20).width; - float y2_1 = y1_1 + dd_1.boxes.at(20).height; - float confidence_1 = dd_1.confidences.at(20); - int classId_1 = dd_1.classIds.at(20); - - objectDetector_1.SaveObjDetectedData(); - - CVObjectDetection objectDetector_2(effectInfo, processingController); - - objectDetector_2._LoadObjDetectdData(); - - CVDetectionData dd_2 = objectDetector_2.GetDetectionData(20); - - float x1_2 = dd_2.boxes.at(20).x; - float y1_2 = dd_2.boxes.at(20).y; - float x2_2 = x1_2 + dd_2.boxes.at(20).width; - float y2_2 = y1_2 + dd_2.boxes.at(20).height; - float confidence_2 = dd_2.confidences.at(20); - int classId_2 = dd_2.classIds.at(20); - - CHECK_EQUAL((int) (x1_1 * 720), (int) (x1_2 * 720)); - CHECK_EQUAL((int) (y1_1 * 400), (int) (y1_2 * 400)); - CHECK_EQUAL((int) (x2_1 * 720), (int) (x2_2 * 720)); - CHECK_EQUAL((int) (y2_1 * 400), (int) (y2_2 * 400)); - CHECK_EQUAL((int) (confidence_1 * 1000), (int) (confidence_2 * 1000)); - CHECK_EQUAL(classId_1, classId_2); - - } - -} // SUITE(Frame_Tests) diff --git a/tests/CVStabilizer.cpp b/tests/CVStabilizer.cpp new file mode 100644 index 000000000..ed3e5403c --- /dev/null +++ b/tests/CVStabilizer.cpp @@ -0,0 +1,135 @@ +/** + * @file + * @brief Unit tests for CVStabilizer + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2020 OpenShot Studios, LLC + * . 
This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include +#include + +#include + +#include "Clip.h" +#include "CVStabilization.h" // for TransformParam, CamTrajectory, CVStabilization +#include "ProcessingController.h" + +using namespace openshot; + +// Just for the stabilizer constructor, it won't be used +ProcessingController stabilizer_pc; + +TEST_CASE( "Stabilize_Video", "[libopenshot][opencv][stabilizer]" ) +{ + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "test.avi"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + std::string json_data = R"proto( + { + "protobuf_data_path": "stabilizer.data", + "smoothing-window": 30 + } )proto"; + + // Create stabilizer + CVStabilization stabilizer(json_data, stabilizer_pc); + + // Stabilize clip for frames 0-21 + stabilizer.stabilizeClip(c1, 0, 21, true); + + // Get stabilized data + TransformParam tp = stabilizer.GetTransformParamData(20); + CamTrajectory ct = stabilizer.GetCamTrajectoryTrackedData(20); + + // // Compare if stabilized data is equal to pre-tested ones + int dx = tp.dx*1000; + int dy = tp.dy*1000; + int da = 
tp.da*1000; + int x = ct.x*1000; + int y = ct.y*1000; + int a = ct.a*1000; + + CHECK(dx == (int) (58)); + CHECK(dy == (int) (-88)); + CHECK(da == (int) (7)); + CHECK(x == (int) (0)); + CHECK(y == (int) (-1)); + CHECK(a == (int) (0)); +} + + +TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][stabilizer]" ) +{ + + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "test.avi"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + std::string json_data = R"proto( + { + "protobuf_data_path": "stabilizer.data", + "smoothing-window": 30 + } )proto"; + + // Create first stabilizer + CVStabilization stabilizer_1(json_data, stabilizer_pc); + + // Stabilize clip for frames 0-20 + stabilizer_1.stabilizeClip(c1, 0, 20+1, true); + + // Get stabilized data + TransformParam tp_1 = stabilizer_1.GetTransformParamData(20); + CamTrajectory ct_1 = stabilizer_1.GetCamTrajectoryTrackedData(20); + + // Save stabilized data + stabilizer_1.SaveStabilizedData(); + + // Create second stabilizer + CVStabilization stabilizer_2(json_data, stabilizer_pc); + + // Load stabilized data from first stabilizer protobuf data + stabilizer_2._LoadStabilizedData(); + + // Get stabilized data + TransformParam tp_2 = stabilizer_2.GetTransformParamData(20); + CamTrajectory ct_2 = stabilizer_2.GetCamTrajectoryTrackedData(20); + + // Compare first stabilizer data with second stabilizer data + CHECK((int) (tp_1.dx * 10000) == (int) (tp_2.dx *10000)); + CHECK((int) (tp_1.dy * 10000) == (int) (tp_2.dy * 10000)); + CHECK((int) (tp_1.da * 10000) == (int) (tp_2.da * 10000)); + CHECK((int) (ct_1.x * 10000) == (int) (ct_2.x * 10000)); + CHECK((int) (ct_1.y * 10000) == (int) (ct_2.y * 10000)); + CHECK((int) (ct_1.a * 10000) == (int) (ct_2.a * 10000)); +} diff --git a/tests/CVStabilizer_Tests.cpp b/tests/CVStabilizer_Tests.cpp deleted file mode 100644 index 4d1f9305c..000000000 --- a/tests/CVStabilizer_Tests.cpp +++ /dev/null @@ -1,142 +0,0 @@ -/** - * @file - * @brief Unit 
tests for openshot::Frame - * @author Jonathan Thomas - * @author FeRD (Frank Dana) - * - * @ref License - */ - -/* LICENSE - * - * Copyright (c) 2008-2019 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . - * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . 
- */ - -#include -#include - -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "Clip.h" -#include "CVStabilization.h" // for TransformParam, CamTrajectory, CVStabilization -#include "ProcessingController.h" - -using namespace openshot; - -SUITE(CVStabilizer_Tests) -{ - - // Just for the stabilizer constructor, it won't be used - ProcessingController processingController; - - TEST(Stabilize_Video) - { - // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "test.avi"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - std::string json_data = R"proto( - { - "protobuf_data_path": "stabilizer.data", - "smoothing-window": 30 - } )proto"; - - // Create stabilizer - CVStabilization stabilizer(json_data, processingController); - - // Stabilize clip for frames 0-21 - stabilizer.stabilizeClip(c1, 0, 21, true); - - // Get stabilized data - TransformParam tp = stabilizer.GetTransformParamData(20); - CamTrajectory ct = stabilizer.GetCamTrajectoryTrackedData(20); - - // // Compare if stabilized data is equal to pre-tested ones - int dx = tp.dx*1000; - int dy = tp.dy*1000; - int da = tp.da*1000; - int x = ct.x*1000; - int y = ct.y*1000; - int a = ct.a*1000; - - CHECK_EQUAL((int) (58), dx); - CHECK_EQUAL((int) (-88), dy); - CHECK_EQUAL((int) (7), da); - CHECK_EQUAL((int) (0), x); - CHECK_EQUAL((int) (-1), y); - CHECK_EQUAL((int) (0), a); - } - - - TEST(SaveLoad_Protobuf) - { - - // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "test.avi"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - std::string json_data = R"proto( - { - "protobuf_data_path": "stabilizer.data", - "smoothing-window": 30 - } )proto"; - - // Create first stabilizer - CVStabilization stabilizer_1(json_data, processingController); - - // Stabilize clip for frames 0-20 - stabilizer_1.stabilizeClip(c1, 0, 20+1, true); - - // Get stabilized data - TransformParam 
tp_1 = stabilizer_1.GetTransformParamData(20); - CamTrajectory ct_1 = stabilizer_1.GetCamTrajectoryTrackedData(20); - - // Save stabilized data - stabilizer_1.SaveStabilizedData(); - - // Create second stabilizer - CVStabilization stabilizer_2(json_data, processingController); - - // Load stabilized data from first stabilizer protobuf data - stabilizer_2._LoadStabilizedData(); - - // Get stabilized data - TransformParam tp_2 = stabilizer_2.GetTransformParamData(20); - CamTrajectory ct_2 = stabilizer_2.GetCamTrajectoryTrackedData(20); - - // Compare first stabilizer data with second stabilizer data - CHECK_EQUAL((int) (tp_1.dx * 10000), (int) (tp_2.dx *10000)); - CHECK_EQUAL((int) (tp_1.dy * 10000), (int) (tp_2.dy * 10000)); - CHECK_EQUAL((int) (tp_1.da * 10000), (int) (tp_2.da * 10000)); - CHECK_EQUAL((int) (ct_1.x * 10000), (int) (ct_2.x * 10000)); - CHECK_EQUAL((int) (ct_1.y * 10000), (int) (ct_2.y * 10000)); - CHECK_EQUAL((int) (ct_1.a * 10000), (int) (ct_2.a * 10000)); - } - -} // SUITE(Frame_Tests) diff --git a/tests/CVTracker.cpp b/tests/CVTracker.cpp new file mode 100644 index 000000000..9548fc380 --- /dev/null +++ b/tests/CVTracker.cpp @@ -0,0 +1,144 @@ +/** + * @file + * @brief Unit tests for CVTracker + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2020 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include +#include + +#include + +#include "Clip.h" +#include "CVTracker.h" // for FrameData, CVTracker +#include "ProcessingController.h" + +using namespace openshot; + +// Just for the tracker constructor, it won't be used +ProcessingController tracker_pc; + +TEST_CASE( "Track_Video", "[libopenshot][opencv][tracker]" ) +{ + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "test.avi"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + std::string json_data = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + } )proto"; + + // Create tracker + CVTracker kcfTracker(json_data, tracker_pc); + + // Track clip for frames 0-20 + kcfTracker.trackClip(c1, 0, 20, true); + // Get tracked data + FrameData fd = kcfTracker.GetTrackedData(20); + float x = fd.x1; + float y = fd.y1; + float width = fd.x2 - x; + float height = fd.y2 - y; + + // Compare if tracked data is equal to pre-tested ones + CHECK((int)(x * 640) == 259); + CHECK((int)(y * 360) == 131); + CHECK((int)(width * 640) == 180); + CHECK((int)(height * 360) == 166); +} + + +TEST_CASE( "SaveLoad_Protobuf", "[libopenshot][opencv][tracker]" ) +{ + + // Create a video clip + std::stringstream path; + path << TEST_MEDIA_PATH << "test.avi"; + + // Open clip + openshot::Clip c1(path.str()); + c1.Open(); + + std::string json_data = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, 
"height": 166, "first-frame": 0} + } )proto"; + + + // Create first tracker + CVTracker kcfTracker_1(json_data, tracker_pc); + + // Track clip for frames 0-20 + kcfTracker_1.trackClip(c1, 0, 20, true); + + // Get tracked data + FrameData fd_1 = kcfTracker_1.GetTrackedData(20); + + float x_1 = fd_1.x1; + float y_1 = fd_1.y1; + float width_1 = fd_1.x2 - x_1; + float height_1 = fd_1.y2 - y_1; + + // Save tracked data + kcfTracker_1.SaveTrackedData(); + + std::string proto_data_1 = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker_type": "", + "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 0} + } )proto"; + + // Create second tracker + CVTracker kcfTracker_2(proto_data_1, tracker_pc); + + // Load tracked data from first tracker protobuf data + kcfTracker_2._LoadTrackedData(); + + // Get tracked data + FrameData fd_2 = kcfTracker_2.GetTrackedData(20); + + float x_2 = fd_2.x1; + float y_2 = fd_2.y1; + float width_2 = fd_2.x2 - x_2; + float height_2 = fd_2.y2 - y_2; + + // Compare first tracker data with second tracker data + CHECK((int)(x_1 * 640) == (int)(x_2 * 640)); + CHECK((int)(y_1 * 360) == (int)(y_2 * 360)); + CHECK((int)(width_1 * 640) == (int)(width_2 * 640)); + CHECK((int)(height_1 * 360) == (int)(height_2 * 360)); +} diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp deleted file mode 100644 index 3229d1ca9..000000000 --- a/tests/CVTracker_Tests.cpp +++ /dev/null @@ -1,151 +0,0 @@ -/** - * @file - * @brief Unit tests for openshot::Frame - * @author Jonathan Thomas - * @author FeRD (Frank Dana) - * - * @ref License - */ - -/* LICENSE - * - * Copyright (c) 2008-2019 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . 
- * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . - */ - -#include -#include - -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "Clip.h" -#include "CVTracker.h" // for FrameData, CVTracker -#include "ProcessingController.h" - -using namespace openshot; - -SUITE(CVTracker_Tests) -{ - - // Just for the tracker constructor, it won't be used - ProcessingController processingController; - - TEST(Track_Video) - { - // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "test.avi"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - std::string json_data = R"proto( - { - "protobuf_data_path": "kcf_tracker.data", - "tracker-type": "KCF", - "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} - } )proto"; - - // Create tracker - CVTracker kcfTracker(json_data, processingController); - - // Track clip for frames 0-20 - kcfTracker.trackClip(c1, 0, 20, true); - // Get tracked data - FrameData fd = kcfTracker.GetTrackedData(20); - float x = fd.x1; - float y = fd.y1; - float width = fd.x2 - x; - float height = fd.y2 - y; - - // Compare if tracked data is equal to pre-tested ones - CHECK_EQUAL(259, (int)(x * 640)); - CHECK_EQUAL(131, (int)(y * 360)); - CHECK_EQUAL(180, (int)(width * 640)); - CHECK_EQUAL(166, 
(int)(height * 360)); - } - - - TEST(SaveLoad_Protobuf) - { - - // Create a video clip - std::stringstream path; - path << TEST_MEDIA_PATH << "test.avi"; - - // Open clip - openshot::Clip c1(path.str()); - c1.Open(); - - std::string json_data = R"proto( - { - "protobuf_data_path": "kcf_tracker.data", - "tracker-type": "KCF", - "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} - } )proto"; - - - // Create first tracker - CVTracker kcfTracker_1(json_data, processingController); - - // Track clip for frames 0-20 - kcfTracker_1.trackClip(c1, 0, 20, true); - - // Get tracked data - FrameData fd_1 = kcfTracker_1.GetTrackedData(20); - - float x_1 = fd_1.x1; - float y_1 = fd_1.y1; - float width_1 = fd_1.x2 - x_1; - float height_1 = fd_1.y2 - y_1; - - // Save tracked data - kcfTracker_1.SaveTrackedData(); - - std::string proto_data_1 = R"proto( - { - "protobuf_data_path": "kcf_tracker.data", - "tracker_type": "", - "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 0} - } )proto"; - - // Create second tracker - CVTracker kcfTracker_2(proto_data_1, processingController); - - // Load tracked data from first tracker protobuf data - kcfTracker_2._LoadTrackedData(); - - // Get tracked data - FrameData fd_2 = kcfTracker_2.GetTrackedData(20); - - float x_2 = fd_2.x1; - float y_2 = fd_2.y1; - float width_2 = fd_2.x2 - x_2; - float height_2 = fd_2.y2 - y_2; - - // Compare first tracker data with second tracker data - CHECK_EQUAL((int)(x_1 * 640), (int)(x_2 * 640)); - CHECK_EQUAL((int)(y_1 * 360), (int)(y_2 * 360)); - CHECK_EQUAL((int)(width_1 * 640), (int)(width_2 * 640)); - CHECK_EQUAL((int)(height_1 * 360), (int)(height_2 * 360)); - } - -} // SUITE(Frame_Tests) diff --git a/tests/CacheDisk.cpp b/tests/CacheDisk.cpp new file mode 100644 index 000000000..01df3f6ee --- /dev/null +++ b/tests/CacheDisk.cpp @@ -0,0 +1,260 @@ +/** + * @file + * @brief Unit tests for openshot::Cache + * @author Jonathan Thomas + * + * @ref License + */ + +/* 
LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include +#include + +#include + +#include "CacheDisk.h" +#include "Json.h" + +using namespace openshot; + +TEST_CASE( "constructor", "[libopenshot][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/constructor/"); + + // Create cache object + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25); + + for (int i = 0; i < 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + c.Add(f); + } + + CHECK(c.Count() == 20); // Cache should have all frames, with no limit + CHECK(c.GetMaxBytes() == 0); // Max frames should default to 0 + + // Clean up + c.Clear(); + temp_path.removeRecursively(); +} + +TEST_CASE( "MaxBytes constructor", "[libopenshot][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/maxbytes-constructor/"); + + // Create cache object + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25, 20 * 1024); + + CHECK(c.GetMaxBytes() == 20 * 1024); + + for (int i = 0; i < 20; i++) + { + // Add blank frame to the cache + auto 
f = std::make_shared(); + f->number = i; + c.Add(f); + } + + CHECK(c.Count() == 20); + CHECK(c.GetMaxBytes() == 20 * 1024); + + // Clean up + c.Clear(); + temp_path.removeRecursively(); +} + +TEST_CASE( "SetMaxBytes", "[libopenshot][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/set_max_bytes/"); + + // Create cache object + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25); + + // Add frames to disk cache + for (int i = 0; i < 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + // Add some picture data + f->AddColor(1280, 720, "Blue"); + f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); + f->AddAudioSilence(500); + c.Add(f); + } + + CHECK(c.GetMaxBytes() == 0); // Cache defaults max frames to -1, unlimited frames + + // Set max frames + c.SetMaxBytes(8 * 1024); + CHECK(c.GetMaxBytes() == 8 * 1024); + + // Set max frames + c.SetMaxBytes(4 * 1024); + CHECK(c.GetMaxBytes() == 4 * 1024); + + // Read frames from disk cache + auto f = c.GetFrame(5); + CHECK(f->GetWidth() == 320); + CHECK(f->GetHeight() == 180); + CHECK(f->GetAudioChannelsCount() == 2); + CHECK(f->GetAudioSamplesCount() == 500); + CHECK(f->ChannelsLayout() == LAYOUT_STEREO); + CHECK(f->SampleRate() == 44100); + + // Check count of cache + CHECK(c.Count() == 20); + + // Clear cache + c.Clear(); + + // Check count of cache + CHECK(c.Count() == 0); + + // Delete cache directory + temp_path.removeRecursively(); +} + +TEST_CASE( "freshen frames", "[libopensoht][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/freshen-frames/"); + + // Create cache object + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25); + + auto f1 = std::make_shared(1, 1280, 1024, "#FRIST!"); + + c.Add(f1); + + for(int i = 2; i <= 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + // Add some picture data + f->AddColor(1280, 720, "Blue"); + f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); + 
f->AddAudioSilence(500); + c.Add(f); + } + + CHECK(c.Count() == 20); + + // Capture current size of cache + auto start_bytes = c.GetBytes(); + + // Re-add existing frame a few times + for (int x = 0; x < 5; x++) + { + c.Add(f1); + } + + // Check that size hasn't changed + CHECK(c.Count() == 20); + CHECK(c.GetBytes() == start_bytes); + + // Clean up + c.Clear(); + temp_path.removeRecursively(); +} + +TEST_CASE( "multiple remove", "[libopenshot][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/multiple-remove/"); + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25); + + // Add frames to disk cache + for (int i = 1; i <= 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + // Add some picture data + f->AddColor(1280, 720, "Blue"); + f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); + f->AddAudioSilence(500); + c.Add(f); + } + + CHECK(c.Count() == 20); + + // Remove a single frame + c.Remove(5); + CHECK(c.Count() == 19); + + // Remove a range of frames + c.Remove(4, 20); + CHECK(c.Count() == 3); + + // Remove the rest + c.Remove(1, 3); + CHECK(c.Count() == 0); + + // Delete cache directory + temp_path.removeRecursively(); +} + +TEST_CASE( "JSON", "[libopenshot][cachedisk]" ) +{ + QDir temp_path = QDir::tempPath() + QString("/cache_json/"); + + // Create cache object + CacheDisk c(temp_path.path().toStdString(), "PPM", 1.0, 0.25); + + // Add some frames (out of order) + auto f3 = std::make_shared(3, 1280, 720, "Blue", 500, 2); + c.Add(f3); + CHECK((int)c.JsonValue()["ranges"].size() == 1); + CHECK(c.JsonValue()["version"].asString() == "1"); + + // Add some frames (out of order) + auto f1 = std::make_shared(1, 1280, 720, "Blue", 500, 2); + c.Add(f1); + CHECK((int)c.JsonValue()["ranges"].size() == 2); + CHECK(c.JsonValue()["version"].asString() == "2"); + + // Add some frames (out of order) + auto f2 = std::make_shared(2, 1280, 720, "Blue", 500, 2); + c.Add(f2); + CHECK((int)c.JsonValue()["ranges"].size() 
== 1); + CHECK(c.JsonValue()["version"].asString() == "3"); + + // Add some frames (out of order) + auto f5 = std::make_shared(5, 1280, 720, "Blue", 500, 2); + c.Add(f5); + CHECK((int)c.JsonValue()["ranges"].size() == 2); + CHECK(c.JsonValue()["version"].asString() == "4"); + + // Add some frames (out of order) + auto f4 = std::make_shared(4, 1280, 720, "Blue", 500, 2); + c.Add(f4); + CHECK((int)c.JsonValue()["ranges"].size() == 1); + CHECK(c.JsonValue()["version"].asString() == "5"); + + // Delete cache directory + c.Clear(); + temp_path.removeRecursively(); +} diff --git a/tests/CacheMemory.cpp b/tests/CacheMemory.cpp new file mode 100644 index 000000000..9b8e54421 --- /dev/null +++ b/tests/CacheMemory.cpp @@ -0,0 +1,351 @@ +/** + * @file + * @brief Unit tests for openshot::CacheMemory + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include +#include + +#include + +#include "CacheMemory.h" +#include "Json.h" + +using namespace openshot; + +TEST_CASE( "default constructor", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c; + + // Loop 50 times + for (int i = 0; i < 50; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + c.Add(f); + } + + CHECK(c.Count() == 50); // Cache should have all frames, with no limit + CHECK(c.GetMaxBytes() == 0); // Max frames should default to 0 +} + +TEST_CASE( "MaxBytes constructor", "[libopenshot][cachememory]" ) +{ + // Create cache object (with a max of 5 previous items) + CacheMemory c(250 * 1024); + + // Loop 20 times + for (int i = 30; i > 0; i--) + { + // Add blank frame to the cache + auto f = std::make_shared(i, 320, 240, "#000000"); + f->AddColor(320, 240, "#000000"); + c.Add(f); + } + + // Cache should have all 20 + CHECK(c.Count() == 20); + + // Add 10 frames again + for (int i = 10; i > 0; i--) + { + // Add blank frame to the cache + auto f = std::make_shared(i, 320, 240, "#000000"); + f->AddColor(320, 240, "#000000"); + c.Add(f); + } + + // Count should be 20, since we're more frames than can be cached. 
+ CHECK(c.Count() == 20); + + // Check which items the cache kept + CHECK(c.GetFrame(1) != nullptr); + CHECK(c.GetFrame(10) != nullptr); + CHECK(c.GetFrame(11) != nullptr); + CHECK(c.GetFrame(19) != nullptr); + CHECK(c.GetFrame(20) != nullptr); + CHECK(c.GetFrame(21) == nullptr); + CHECK(c.GetFrame(30) == nullptr); +} + +TEST_CASE( "Clear", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c(250 * 1024); + + // Loop 10 times + for (int i = 0; i < 10; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + c.Add(f); + } + + // Cache should only have 10 items + CHECK(c.Count() == 10); + + // Clear Cache + c.Clear(); + + // Cache should now have 0 items + CHECK(c.Count() == 0); +} + +TEST_CASE( "add duplicate Frames", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c(250 * 1024); + + // Loop 10 times + for (int i = 0; i < 10; i++) + { + // Add blank frame to the cache (each frame is #1) + auto f = std::make_shared(); + c.Add(f); + } + + // Cache should only have 1 items (since all frames were frame #1) + CHECK(c.Count() == 1); +} + +TEST_CASE( "check if Frame exists", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c(250 * 1024); + + // Loop 5 times + for (int i = 1; i < 6; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + c.Add(f); + } + + // Check if certain frames exists (only 1-5 exist) + CHECK(c.GetFrame(0) == nullptr); + CHECK(c.GetFrame(1) != nullptr); + CHECK(c.GetFrame(2) != nullptr); + CHECK(c.GetFrame(3) != nullptr); + CHECK(c.GetFrame(4) != nullptr); + CHECK(c.GetFrame(5) != nullptr); + CHECK(c.GetFrame(6) == nullptr); +} + +TEST_CASE( "GetFrame", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c(250 * 1024); + + // Create 3 frames + Frame *red = new Frame(1, 300, 300, "red"); + Frame *blue = new Frame(2, 400, 400, "blue"); + Frame *green = new Frame(3, 500, 500, "green"); + + // 
Add frames to cache + c.Add(std::shared_ptr(red)); + c.Add(std::shared_ptr(blue)); + c.Add(std::shared_ptr(green)); + + // Get frames + CHECK(c.GetFrame(0) == nullptr); + CHECK(c.GetFrame(4) == nullptr); + + // Check if certain frames exists (only 1-5 exist) + CHECK(c.GetFrame(1)->number == 1); + CHECK(c.GetFrame(2)->number == 2); + CHECK(c.GetFrame(3)->number == 3); +} + +TEST_CASE( "GetSmallest", "[libopenshot][cachememory]" ) +{ + // Create cache object (with a max of 10 items) + CacheMemory c(250 * 1024); + + // Create 3 frames + auto red = std::make_shared(1, 300, 300, "red"); + auto blue = std::make_shared(2, 400, 400, "blue"); + auto green = std::make_shared(3, 500, 500, "green"); + + // Add frames to cache + c.Add(red); + c.Add(blue); + + // Check if frame 1 is the front + CHECK(c.GetSmallestFrame()->number == 1); + + c.Add(green); + + // Check if frame 1 is STILL the front + CHECK(c.GetSmallestFrame()->number == 1); + + c.Remove(1); + + // Check if frame 2 is now the front + CHECK(c.GetSmallestFrame()->number == 2); +} + +TEST_CASE( "Remove", "[libopenshot][cachememory]" ) +{ + // Create cache object (with a max of 10 items) + CacheMemory c(250 * 1024); + + // Create 3 frames + auto red = std::make_shared(1, 300, 300, "red"); + auto blue = std::make_shared(2, 400, 400, "blue"); + auto green = std::make_shared(3, 500, 500, "green"); + + // Add frames to cache + c.Add(red); + c.Add(blue); + c.Add(green); + + // Check if count is 3 + CHECK(c.Count() == 3); + + // Check if frame 2 exists + CHECK(c.GetFrame(2) != nullptr); + + // Remove frame 2 + c.Remove(2); + + // Check if frame 2 exists + CHECK(c.GetFrame(2) == nullptr); + + // Check if count is 2 + CHECK(c.Count() == 2); + + // Remove frame 1 + c.Remove(1); + + // Check if frame 1 exists + CHECK(c.GetFrame(1) == nullptr); + + // Check if count is 1 + CHECK(c.Count() == 1); +} + +TEST_CASE( "SetMaxBytes", "[libopenshot][cachememory]" ) +{ + // Create cache object + CacheMemory c; + + // Loop 20 times + for 
(int i = 0; i < 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + c.Add(f); + } + + CHECK(c.GetMaxBytes() == 0); // Cache defaults max frames to -1, unlimited frames + + // Set max frames + c.SetMaxBytes(8 * 1024); + CHECK(c.GetMaxBytes() == 8 * 1024); + + // Set max frames + c.SetMaxBytes(4 * 1024); + CHECK(c.GetMaxBytes() == 4 * 1024); +} + +TEST_CASE( "multiple remove", "[libopenshot][cachememory]" ) +{ + // Create cache object (using platform /temp/ directory) + CacheMemory c; + + // Add frames to disk cache + for (int i = 1; i <= 20; i++) + { + // Add blank frame to the cache + auto f = std::make_shared(); + f->number = i; + // Add some picture data + f->AddColor(1280, 720, "Blue"); + f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); + f->AddAudioSilence(500); + c.Add(f); + } + + CHECK(c.Count() == 20); + + // Remove a single frame + c.Remove(17); + CHECK(c.Count() == 19); + + // Remove a range of frames + c.Remove(16, 18); + CHECK(c.Count() == 17); + + // Remove all remaining frames + c.Remove(1, 20); + CHECK(c.Count() == 0); +} + + + +TEST_CASE( "JSON", "[libopenshot][cachememory]" ) +{ + // Create memory cache object + CacheMemory c; + + // Add some frames (out of order) + auto f3 = std::make_shared(3, 1280, 720, "Blue", 500, 2); + c.Add(f3); + CHECK((int)c.JsonValue()["ranges"].size() == 1); + CHECK(c.JsonValue()["version"].asString() == "1"); + + // Add some frames (out of order) + auto f1 = std::make_shared(1, 1280, 720, "Blue", 500, 2); + c.Add(f1); + CHECK((int)c.JsonValue()["ranges"].size() == 2); + CHECK(c.JsonValue()["version"].asString() == "2"); + + // Add some frames (out of order) + auto f2 = std::make_shared(2, 1280, 720, "Blue", 500, 2); + c.Add(f2); + CHECK((int)c.JsonValue()["ranges"].size() == 1); + CHECK(c.JsonValue()["version"].asString() == "3"); + + // Add some frames (out of order) + auto f5 = std::make_shared(5, 1280, 720, "Blue", 500, 2); + c.Add(f5); + 
CHECK((int)c.JsonValue()["ranges"].size() == 2); + CHECK(c.JsonValue()["version"].asString() == "4"); + + // Add some frames (out of order) + auto f4 = std::make_shared(4, 1280, 720, "Blue", 500, 2); + c.Add(f4); + CHECK((int)c.JsonValue()["ranges"].size() == 1); + CHECK(c.JsonValue()["version"].asString() == "5"); + +} diff --git a/tests/Cache_Tests.cpp b/tests/Cache_Tests.cpp deleted file mode 100644 index 1220cf14b..000000000 --- a/tests/Cache_Tests.cpp +++ /dev/null @@ -1,470 +0,0 @@ -/** - * @file - * @brief Unit tests for openshot::Cache - * @author Jonathan Thomas - * - * @ref License - */ - -/* LICENSE - * - * Copyright (c) 2008-2019 OpenShot Studios, LLC - * . This file is part of - * OpenShot Library (libopenshot), an open-source project dedicated to - * delivering high quality video editing and animation solutions to the - * world. For more information visit . - * - * OpenShot Library (libopenshot) is free software: you can redistribute it - * and/or modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * OpenShot Library (libopenshot) is distributed in the hope that it will be - * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with OpenShot Library. If not, see . 
- */ - -#include - -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "CacheDisk.h" -#include "CacheMemory.h" -#include "Json.h" - -#include - -using namespace openshot; - -TEST(Cache_Default_Constructor) -{ - // Create cache object - CacheMemory c; - - // Loop 50 times - for (int i = 0; i < 50; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - c.Add(f); - } - - CHECK_EQUAL(50, c.Count()); // Cache should have all frames, with no limit - CHECK_EQUAL(0, c.GetMaxBytes()); // Max frames should default to 0 -} - -TEST(Cache_Max_Bytes_Constructor) -{ - // Create cache object (with a max of 5 previous items) - CacheMemory c(250 * 1024); - - // Loop 20 times - for (int i = 30; i > 0; i--) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame(i, 320, 240, "#000000")); - f->AddColor(320, 240, "#000000"); - c.Add(f); - } - - // Cache should have all 20 - CHECK_EQUAL(20, c.Count()); - - // Add 10 frames again - for (int i = 10; i > 0; i--) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame(i, 320, 240, "#000000")); - f->AddColor(320, 240, "#000000"); - c.Add(f); - } - - // Count should be 20, since we're more frames than can be cached. 
- CHECK_EQUAL(20, c.Count()); - - // Check which items the cache kept - CHECK_EQUAL(true, c.GetFrame(1) != NULL); - CHECK_EQUAL(true, c.GetFrame(10) != NULL); - CHECK_EQUAL(true, c.GetFrame(11) != NULL); - CHECK_EQUAL(true, c.GetFrame(19) != NULL); - CHECK_EQUAL(true, c.GetFrame(20) != NULL); - CHECK_EQUAL(false, c.GetFrame(21) != NULL); - CHECK_EQUAL(false, c.GetFrame(30) != NULL); -} - -TEST(Cache_Clear) -{ - // Create cache object - CacheMemory c(250 * 1024); - - // Loop 10 times - for (int i = 0; i < 10; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - c.Add(f); - } - - // Cache should only have 10 items - CHECK_EQUAL(10, c.Count()); - - // Clear Cache - c.Clear(); - - // Cache should now have 0 items - CHECK_EQUAL(0, c.Count()); -} - -TEST(Cache_Add_Duplicate_Frames) -{ - // Create cache object - CacheMemory c(250 * 1024); - - // Loop 10 times - for (int i = 0; i < 10; i++) - { - // Add blank frame to the cache (each frame is #1) - std::shared_ptr f(new Frame()); - c.Add(f); - } - - // Cache should only have 1 items (since all frames were frame #1) - CHECK_EQUAL(1, c.Count()); -} - -TEST(Cache_Check_If_Frame_Exists) -{ - // Create cache object - CacheMemory c(250 * 1024); - - // Loop 5 times - for (int i = 1; i < 6; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - c.Add(f); - } - - // Check if certain frames exists (only 1-5 exist) - CHECK_EQUAL(false, c.GetFrame(0) != NULL); - CHECK_EQUAL(true, c.GetFrame(1) != NULL); - CHECK_EQUAL(true, c.GetFrame(2) != NULL); - CHECK_EQUAL(true, c.GetFrame(3) != NULL); - CHECK_EQUAL(true, c.GetFrame(4) != NULL); - CHECK_EQUAL(true, c.GetFrame(5) != NULL); - CHECK_EQUAL(false, c.GetFrame(6) != NULL); -} - -TEST(Cache_GetFrame) -{ - // Create cache object - CacheMemory c(250 * 1024); - - // Create 3 frames - Frame *red = new Frame(1, 300, 300, "red"); - Frame *blue = new Frame(2, 400, 400, "blue"); - Frame *green = new Frame(3, 500, 
500, "green"); - - // Add frames to cache - c.Add(std::shared_ptr(red)); - c.Add(std::shared_ptr(blue)); - c.Add(std::shared_ptr(green)); - - // Get frames - CHECK_EQUAL(true, c.GetFrame(0) == NULL); - CHECK_EQUAL(true, c.GetFrame(4) == NULL); - - // Check if certain frames exists (only 1-5 exist) - CHECK_EQUAL(1, c.GetFrame(1)->number); - CHECK_EQUAL(2, c.GetFrame(2)->number); - CHECK_EQUAL(3, c.GetFrame(3)->number); -} - -TEST(Cache_GetSmallest) -{ - // Create cache object (with a max of 10 items) - CacheMemory c(250 * 1024); - - // Create 3 frames - Frame *red = new Frame(1, 300, 300, "red"); - Frame *blue = new Frame(2, 400, 400, "blue"); - Frame *green = new Frame(3, 500, 500, "green"); - - // Add frames to cache - c.Add(std::shared_ptr(red)); - c.Add(std::shared_ptr(blue)); - c.Add(std::shared_ptr(green)); - - // Check if frame 1 is the front - CHECK_EQUAL(1, c.GetSmallestFrame()->number); - - // Check if frame 1 is STILL the front - CHECK_EQUAL(1, c.GetSmallestFrame()->number); - - // Erase frame 1 - c.Remove(1); - - // Check if frame 2 is the front - CHECK_EQUAL(2, c.GetSmallestFrame()->number); -} - -TEST(Cache_Remove) -{ - // Create cache object (with a max of 10 items) - CacheMemory c(250 * 1024); - - // Create 3 frames - Frame *red = new Frame(1, 300, 300, "red"); - Frame *blue = new Frame(2, 400, 400, "blue"); - Frame *green = new Frame(3, 500, 500, "green"); - - // Add frames to cache - c.Add(std::shared_ptr(red)); - c.Add(std::shared_ptr(blue)); - c.Add(std::shared_ptr(green)); - - // Check if count is 3 - CHECK_EQUAL(3, c.Count()); - - // Check if frame 2 exists - CHECK_EQUAL(true, c.GetFrame(2) != NULL); - - // Remove frame 2 - c.Remove(2); - - // Check if frame 2 exists - CHECK_EQUAL(false, c.GetFrame(2) != NULL); - - // Check if count is 2 - CHECK_EQUAL(2, c.Count()); - - // Remove frame 1 - c.Remove(1); - - // Check if frame 1 exists - CHECK_EQUAL(false, c.GetFrame(1) != NULL); - - // Check if count is 1 - CHECK_EQUAL(1, c.Count()); -} - 
-TEST(Cache_Set_Max_Bytes) -{ - // Create cache object - CacheMemory c; - - // Loop 20 times - for (int i = 0; i < 20; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - c.Add(f); - } - - CHECK_EQUAL(0, c.GetMaxBytes()); // Cache defaults max frames to -1, unlimited frames - - // Set max frames - c.SetMaxBytes(8 * 1024); - CHECK_EQUAL(8 * 1024, c.GetMaxBytes()); - - // Set max frames - c.SetMaxBytes(4 * 1024); - CHECK_EQUAL(4 * 1024, c.GetMaxBytes()); -} - -TEST(Cache_Multiple_Remove) -{ - // Create cache object (using platform /temp/ directory) - CacheMemory c; - - // Add frames to disk cache - for (int i = 1; i <= 20; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - // Add some picture data - f->AddColor(1280, 720, "Blue"); - f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); - f->AddAudioSilence(500); - c.Add(f); - } - - // Should have 20 frames - CHECK_EQUAL(20, c.Count()); - - // Remove all 20 frames - c.Remove(1, 20); - - // Should have 20 frames - CHECK_EQUAL(0, c.Count()); -} - -TEST(CacheDisk_Set_Max_Bytes) -{ - // Create cache object (using platform /temp/ directory) - CacheDisk c("", "PPM", 1.0, 0.25); - - // Add frames to disk cache - for (int i = 0; i < 20; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - // Add some picture data - f->AddColor(1280, 720, "Blue"); - f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); - f->AddAudioSilence(500); - c.Add(f); - } - - CHECK_EQUAL(0, c.GetMaxBytes()); // Cache defaults max frames to -1, unlimited frames - - // Set max frames - c.SetMaxBytes(8 * 1024); - CHECK_EQUAL(8 * 1024, c.GetMaxBytes()); - - // Set max frames - c.SetMaxBytes(4 * 1024); - CHECK_EQUAL(4 * 1024, c.GetMaxBytes()); - - // Read frames from disk cache - std::shared_ptr f = c.GetFrame(5); - CHECK_EQUAL(320, f->GetWidth()); - CHECK_EQUAL(180, f->GetHeight()); - CHECK_EQUAL(2, f->GetAudioChannelsCount()); - 
CHECK_EQUAL(500, f->GetAudioSamplesCount()); - CHECK_EQUAL(LAYOUT_STEREO, f->ChannelsLayout()); - CHECK_EQUAL(44100, f->SampleRate()); - - // Check count of cache - CHECK_EQUAL(20, c.Count()); - - // Clear cache - c.Clear(); - - // Check count of cache - CHECK_EQUAL(0, c.Count()); - - // Delete cache directory - QDir path = QDir::tempPath() + QString("/preview-cache/"); - path.removeRecursively(); -} - -TEST(CacheDisk_Multiple_Remove) -{ - // Create cache object (using platform /temp/ directory) - CacheDisk c("", "PPM", 1.0, 0.25); - - // Add frames to disk cache - for (int i = 1; i <= 20; i++) - { - // Add blank frame to the cache - std::shared_ptr f(new Frame()); - f->number = i; - // Add some picture data - f->AddColor(1280, 720, "Blue"); - f->ResizeAudio(2, 500, 44100, LAYOUT_STEREO); - f->AddAudioSilence(500); - c.Add(f); - } - - // Should have 20 frames - CHECK_EQUAL(20, c.Count()); - - // Remove all 20 frames - c.Remove(1, 20); - - // Should have 20 frames - CHECK_EQUAL(0, c.Count()); - - // Delete cache directory - QDir path = QDir::tempPath() + QString("/preview-cache/"); - path.removeRecursively(); -} - -TEST(CacheDisk_JSON) -{ - // Create cache object (using platform /temp/ directory) - CacheDisk c("", "PPM", 1.0, 0.25); - - // Add some frames (out of order) - std::shared_ptr f3(new Frame(3, 1280, 720, "Blue", 500, 2)); - c.Add(f3); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("1", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f1(new Frame(1, 1280, 720, "Blue", 500, 2)); - c.Add(f1); - CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("2", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f2(new Frame(2, 1280, 720, "Blue", 500, 2)); - c.Add(f2); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("3", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f5(new 
Frame(5, 1280, 720, "Blue", 500, 2)); - c.Add(f5); - CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("4", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f4(new Frame(4, 1280, 720, "Blue", 500, 2)); - c.Add(f4); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("5", c.JsonValue()["version"].asString()); - - // Delete cache directory - QDir path = QDir::tempPath() + QString("/preview-cache/"); - path.removeRecursively(); -} - -TEST(CacheMemory_JSON) -{ - // Create memory cache object - CacheMemory c; - - // Add some frames (out of order) - std::shared_ptr f3(new Frame(3, 1280, 720, "Blue", 500, 2)); - c.Add(f3); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("1", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f1(new Frame(1, 1280, 720, "Blue", 500, 2)); - c.Add(f1); - CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("2", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f2(new Frame(2, 1280, 720, "Blue", 500, 2)); - c.Add(f2); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("3", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f5(new Frame(5, 1280, 720, "Blue", 500, 2)); - c.Add(f5); - CHECK_EQUAL(2, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("4", c.JsonValue()["version"].asString()); - - // Add some frames (out of order) - std::shared_ptr f4(new Frame(4, 1280, 720, "Blue", 500, 2)); - c.Add(f4); - CHECK_EQUAL(1, (int)c.JsonValue()["ranges"].size()); - CHECK_EQUAL("5", c.JsonValue()["version"].asString()); - -} diff --git a/tests/Clip_Tests.cpp b/tests/Clip.cpp similarity index 62% rename from tests/Clip_Tests.cpp rename to tests/Clip.cpp index 5d8ab43ce..31842eb3d 100644 --- a/tests/Clip_Tests.cpp +++ b/tests/Clip.cpp @@ -31,15 +31,8 @@ #include #include -#include "UnitTest++.h" 
+#include -// Work around older versions of UnitTest++ without REQUIRE -#ifndef REQUIRE - #define REQUIRE -#endif - -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "Clip.h" #include "Frame.h" #include "Fraction.h" @@ -48,25 +41,22 @@ using namespace openshot; -SUITE(Clip) -{ - -TEST(Default_Constructor) +TEST_CASE( "default constructor", "[libopenshot][clip]" ) { // Create a empty clip Clip c1; // Check basic settings - CHECK_EQUAL(ANCHOR_CANVAS, c1.anchor); - CHECK_EQUAL(GRAVITY_CENTER, c1.gravity); - CHECK_EQUAL(SCALE_FIT, c1.scale); - CHECK_EQUAL(0, c1.Layer()); - CHECK_CLOSE(0.0f, c1.Position(), 0.00001); - CHECK_CLOSE(0.0f, c1.Start(), 0.00001); - CHECK_CLOSE(0.0f, c1.End(), 0.00001); + CHECK(c1.anchor == ANCHOR_CANVAS); + CHECK(c1.gravity == GRAVITY_CENTER); + CHECK(c1.scale == SCALE_FIT); + CHECK(c1.Layer() == 0); + CHECK(c1.Position() == Approx(0.0f).margin(0.00001)); + CHECK(c1.Start() == Approx(0.0f).margin(0.00001)); + CHECK(c1.End() == Approx(0.0f).margin(0.00001)); } -TEST(Clip_Constructor) +TEST_CASE( "path string constructor", "[libopenshot][clip]" ) { // Create a empty clip std::stringstream path; @@ -75,29 +65,29 @@ TEST(Clip_Constructor) c1.Open(); // Check basic settings - CHECK_EQUAL(ANCHOR_CANVAS, c1.anchor); - CHECK_EQUAL(GRAVITY_CENTER, c1.gravity); - CHECK_EQUAL(SCALE_FIT, c1.scale); - CHECK_EQUAL(0, c1.Layer()); - CHECK_CLOSE(0.0f, c1.Position(), 0.00001); - CHECK_CLOSE(0.0f, c1.Start(), 0.00001); - CHECK_CLOSE(4.39937f, c1.End(), 0.00001); + CHECK(c1.anchor == ANCHOR_CANVAS); + CHECK(c1.gravity == GRAVITY_CENTER); + CHECK(c1.scale == SCALE_FIT); + CHECK(c1.Layer() == 0); + CHECK(c1.Position() == Approx(0.0f).margin(0.00001)); + CHECK(c1.Start() == Approx(0.0f).margin(0.00001)); + CHECK(c1.End() == Approx(4.39937f).margin(0.00001)); } -TEST(Basic_Gettings_and_Setters) +TEST_CASE( "basic getters and setters", "[libopenshot][clip]" ) { // Create a empty clip Clip c1; // Check basic settings - 
CHECK_THROW(c1.Open(), ReaderClosed); - CHECK_EQUAL(ANCHOR_CANVAS, c1.anchor); - CHECK_EQUAL(GRAVITY_CENTER, c1.gravity); - CHECK_EQUAL(SCALE_FIT, c1.scale); - CHECK_EQUAL(0, c1.Layer()); - CHECK_CLOSE(0.0f, c1.Position(), 0.00001); - CHECK_CLOSE(0.0f, c1.Start(), 0.00001); - CHECK_CLOSE(0.0f, c1.End(), 0.00001); + CHECK_THROWS_AS(c1.Open(), ReaderClosed); + CHECK(c1.anchor == ANCHOR_CANVAS); + CHECK(c1.gravity == GRAVITY_CENTER); + CHECK(c1.scale == SCALE_FIT); + CHECK(c1.Layer() == 0); + CHECK(c1.Position() == Approx(0.0f).margin(0.00001)); + CHECK(c1.Start() == Approx(0.0f).margin(0.00001)); + CHECK(c1.End() == Approx(0.0f).margin(0.00001)); // Change some properties c1.Layer(1); @@ -105,13 +95,13 @@ TEST(Basic_Gettings_and_Setters) c1.Start(3.5); c1.End(10.5); - CHECK_EQUAL(1, c1.Layer()); - CHECK_CLOSE(5.0f, c1.Position(), 0.00001); - CHECK_CLOSE(3.5f, c1.Start(), 0.00001); - CHECK_CLOSE(10.5f, c1.End(), 0.00001); + CHECK(c1.Layer() == 1); + CHECK(c1.Position() == Approx(5.0f).margin(0.00001)); + CHECK(c1.Start() == Approx(3.5f).margin(0.00001)); + CHECK(c1.End() == Approx(10.5f).margin(0.00001)); } -TEST(Properties) +TEST_CASE( "properties", "[libopenshot][clip]" ) { // Create a empty clip Clip c1; @@ -136,11 +126,11 @@ TEST(Properties) properties.c_str(), properties.c_str() + properties.size(), &root, &errors ); - CHECK_EQUAL(true, success); + CHECK(success == true); // Check for specific things - CHECK_CLOSE(1.0f, root["alpha"]["value"].asDouble(), 0.01); - CHECK_EQUAL(true, root["alpha"]["keyframe"].asBool()); + CHECK(root["alpha"]["value"].asDouble() == Approx(1.0f).margin(0.01)); + CHECK(root["alpha"]["keyframe"].asBool() == true); // Get properties JSON string at frame 250 properties = c1.PropertiesJSON(250); @@ -151,11 +141,11 @@ TEST(Properties) properties.c_str(), properties.c_str() + properties.size(), &root, &errors ); - REQUIRE CHECK_EQUAL(true, success); + CHECK(success == true); // Check for specific things - CHECK_CLOSE(0.5f, 
root["alpha"]["value"].asDouble(), 0.01); - CHECK_EQUAL(false, root["alpha"]["keyframe"].asBool()); + CHECK(root["alpha"]["value"].asDouble() == Approx(0.5f).margin(0.01)); + CHECK_FALSE(root["alpha"]["keyframe"].asBool()); // Get properties JSON string at frame 250 (again) properties = c1.PropertiesJSON(250); @@ -166,10 +156,10 @@ TEST(Properties) properties.c_str(), properties.c_str() + properties.size(), &root, &errors ); - REQUIRE CHECK_EQUAL(true, success); + CHECK(success == true); // Check for specific things - CHECK_EQUAL(false, root["alpha"]["keyframe"].asBool()); + CHECK_FALSE(root["alpha"]["keyframe"].asBool()); // Get properties JSON string at frame 500 properties = c1.PropertiesJSON(500); @@ -180,17 +170,17 @@ TEST(Properties) properties.c_str(), properties.c_str() + properties.size(), &root, &errors ); - REQUIRE CHECK_EQUAL(true, success); + CHECK(success == true); // Check for specific things - CHECK_CLOSE(0.0f, root["alpha"]["value"].asDouble(), 0.00001); - CHECK_EQUAL(true, root["alpha"]["keyframe"].asBool()); + CHECK(root["alpha"]["value"].asDouble() == Approx(0.0f).margin(0.00001)); + CHECK(root["alpha"]["keyframe"].asBool() == true); // Free up the reader we allocated delete reader; } -TEST(Effects) +TEST_CASE( "effects", "[libopenshot][clip]" ) { // Load clip with video std::stringstream path; @@ -209,13 +199,13 @@ TEST(Effects) int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(255, (int)pixels[pixel_index]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK((int)pixels[pixel_index] == 255); + CHECK((int)pixels[pixel_index + 1] == 255); + CHECK((int)pixels[pixel_index + 2] == 255); + CHECK((int)pixels[pixel_index + 3] == 255); // Check the # of Effects - CHECK_EQUAL(1, (int)c10.Effects().size()); + CHECK((int)c10.Effects().size() == 1); // Add a 2nd negate effect 
@@ -230,16 +220,16 @@ TEST(Effects) pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_EQUAL(0, (int)pixels[pixel_index]); - CHECK_EQUAL(0, (int)pixels[pixel_index + 1]); - CHECK_EQUAL(0, (int)pixels[pixel_index + 2]); - CHECK_EQUAL(255, (int)pixels[pixel_index + 3]); + CHECK((int)pixels[pixel_index] == 0); + CHECK((int)pixels[pixel_index + 1] == 0); + CHECK((int)pixels[pixel_index + 2] == 0); + CHECK((int)pixels[pixel_index + 3] == 255); // Check the # of Effects - CHECK_EQUAL(2, (int)c10.Effects().size()); + CHECK((int)c10.Effects().size() == 2); } -TEST(Verify_Parent_Timeline) +TEST_CASE( "verify parent Timeline", "[libopenshot][clip]" ) { Timeline t1(640, 480, Fraction(30,1), 44100, 2, LAYOUT_STEREO); @@ -250,15 +240,13 @@ TEST(Verify_Parent_Timeline) c1.Open(); // Check size of frame image - CHECK_EQUAL(c1.GetFrame(1)->GetImage()->width(), 1280); - CHECK_EQUAL(c1.GetFrame(1)->GetImage()->height(), 720); + CHECK(1280 == c1.GetFrame(1)->GetImage()->width()); + CHECK(720 == c1.GetFrame(1)->GetImage()->height()); // Add clip to timeline t1.AddClip(&c1); // Check size of frame image (with an associated timeline) - CHECK_EQUAL(c1.GetFrame(1)->GetImage()->width(), 640); - CHECK_EQUAL(c1.GetFrame(1)->GetImage()->height(), 360); + CHECK(640 == c1.GetFrame(1)->GetImage()->width()); + CHECK(360 == c1.GetFrame(1)->GetImage()->height()); } - -} // SUITE diff --git a/tests/Color_Tests.cpp b/tests/Color.cpp similarity index 60% rename from tests/Color_Tests.cpp rename to tests/Color.cpp index 8b110f9e1..dcd4b1d6b 100644 --- a/tests/Color_Tests.cpp +++ b/tests/Color.cpp @@ -31,27 +31,24 @@ #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Color.h" #include "Exceptions.h" #include "KeyFrame.h" #include "Json.h" -SUITE(Color) { - -TEST(Default_Constructor) +TEST_CASE( "default constructor", "[libopenshot][color]" ) 
{ // Create an empty color openshot::Color c1; - CHECK_CLOSE(0.0f, c1.red.GetValue(0), 0.00001); - CHECK_CLOSE(0.0f, c1.green.GetValue(0), 0.00001); - CHECK_CLOSE(0.0f, c1.blue.GetValue(0), 0.00001); + CHECK(c1.red.GetValue(0) == Approx(0.0f).margin(0.00001)); + CHECK(c1.green.GetValue(0) == Approx(0.0f).margin(0.00001)); + CHECK(c1.blue.GetValue(0) == Approx(0.0f).margin(0.00001)); } -TEST(Keyframe_constructor) +TEST_CASE( "Keyframe constructor", "[libopenshot][color]" ) { std::vector kfs{0, 0, 0, 0}; int64_t i(0); @@ -60,13 +57,13 @@ TEST(Keyframe_constructor) } auto c = openshot::Color(kfs[0], kfs[1], kfs[2], kfs[3]); - CHECK_CLOSE(20, c.red.GetLong(100), 0.01); - CHECK_CLOSE(40, c.green.GetLong(100), 0.01); - CHECK_CLOSE(60, c.blue.GetLong(100), 0.01); - CHECK_CLOSE(80, c.alpha.GetLong(100), 0.01); + CHECK(c.red.GetLong(100) == Approx(20).margin(0.01)); + CHECK(c.green.GetLong(100) == Approx(40).margin(0.01)); + CHECK(c.blue.GetLong(100) == Approx(60).margin(0.01)); + CHECK(c.alpha.GetLong(100) == Approx(80).margin(0.01)); } -TEST(Animate_Colors) +TEST_CASE( "Animate_Colors", "[libopenshot][color]" ) { // Create an empty color openshot::Color c1; @@ -82,12 +79,12 @@ TEST(Animate_Colors) c1.blue.AddPoint(1000, 65); // Check the color at frame 500 - CHECK_CLOSE(0, c1.red.GetLong(500), 0.01); - CHECK_CLOSE(187, c1.green.GetLong(500), 0.01); - CHECK_CLOSE(160, c1.blue.GetLong(500), 0.01); + CHECK(c1.red.GetLong(500) == Approx(0).margin(0.01)); + CHECK(c1.green.GetLong(500) == Approx(187).margin(0.01)); + CHECK(c1.blue.GetLong(500) == Approx(160).margin(0.01)); } -TEST(HEX_Value) +TEST_CASE( "HEX_Value", "[libopenshot][color]" ) { // Color openshot::Color c; @@ -98,13 +95,13 @@ TEST(HEX_Value) c.blue = openshot::Keyframe(0); c.blue.AddPoint(100, 255); - CHECK_EQUAL("#000000", c.GetColorHex(1)); - CHECK_EQUAL("#7d7d7d", c.GetColorHex(50)); - CHECK_EQUAL("#ffffff", c.GetColorHex(100)); + CHECK(c.GetColorHex(1) == "#000000"); + CHECK(c.GetColorHex(50) == "#7d7d7d"); + 
CHECK(c.GetColorHex(100) == "#ffffff"); } -TEST(HEX_Constructor) +TEST_CASE( "HEX_Constructor", "[libopenshot][color]" ) { // Color openshot::Color c("#4586db"); @@ -112,12 +109,12 @@ TEST(HEX_Constructor) c.green.AddPoint(100, 255); c.blue.AddPoint(100, 255); - CHECK_EQUAL("#4586db", c.GetColorHex(1)); - CHECK_EQUAL("#a0c1ed", c.GetColorHex(50)); - CHECK_EQUAL("#ffffff", c.GetColorHex(100)); + CHECK(c.GetColorHex(1) == "#4586db"); + CHECK(c.GetColorHex(50) == "#a0c1ed"); + CHECK(c.GetColorHex(100) == "#ffffff"); } -TEST(Distance) +TEST_CASE( "Distance", "[libopenshot][color]" ) { // Color openshot::Color c1("#040a0c"); @@ -125,11 +122,19 @@ TEST(Distance) openshot::Color c3("#000000"); openshot::Color c4("#ffffff"); - CHECK_CLOSE(19.0f, openshot::Color::GetDistance(c1.red.GetInt(1), c1.blue.GetInt(1), c1.green.GetInt(1), c2.red.GetInt(1), c2.blue.GetInt(1), c2.green.GetInt(1)), 0.001); - CHECK_CLOSE(764.0f, openshot::Color::GetDistance(c3.red.GetInt(1), c3.blue.GetInt(1), c3.green.GetInt(1), c4.red.GetInt(1), c4.blue.GetInt(1), c4.green.GetInt(1)), 0.001); + CHECK( + openshot::Color::GetDistance( + c1.red.GetInt(1), c1.blue.GetInt(1), c1.green.GetInt(1), + c2.red.GetInt(1), c2.blue.GetInt(1), c2.green.GetInt(1) + ) == Approx(19.0f).margin(0.001)); + CHECK( + openshot::Color::GetDistance( + c3.red.GetInt(1), c3.blue.GetInt(1), c3.green.GetInt(1), + c4.red.GetInt(1), c4.blue.GetInt(1), c4.green.GetInt(1) + ) == Approx(764.0f).margin(0.001)); } -TEST(RGBA_Constructor) +TEST_CASE( "RGBA_Constructor", "[libopenshot][color]" ) { // Color openshot::Color c(69, 134, 219, 255); @@ -137,17 +142,17 @@ TEST(RGBA_Constructor) c.green.AddPoint(100, 255); c.blue.AddPoint(100, 255); - CHECK_EQUAL("#4586db", c.GetColorHex(1)); - CHECK_EQUAL("#a0c1ed", c.GetColorHex(50)); - CHECK_EQUAL("#ffffff", c.GetColorHex(100)); + CHECK(c.GetColorHex(1) == "#4586db"); + CHECK(c.GetColorHex(50) == "#a0c1ed"); + CHECK(c.GetColorHex(100) == "#ffffff"); // Color with alpha openshot::Color c1(69, 
134, 219, 128); - CHECK_EQUAL("#4586db", c1.GetColorHex(1)); - CHECK_EQUAL(128, c1.alpha.GetInt(1)); + CHECK(c1.GetColorHex(1) == "#4586db"); + CHECK(c1.alpha.GetInt(1) == 128); } -TEST(Json) +TEST_CASE( "Json", "[libopenshot][color]" ) { openshot::Color c(128, 128, 128, 0); openshot::Color c1; @@ -158,14 +163,14 @@ TEST(Json) // Check that JSON produced is identical auto j = c.Json(); auto j1 = c1.Json(); - CHECK_EQUAL(j, j1); + CHECK(j1 == j); // Check Json::Value representation auto jv = c.JsonValue(); auto jv_string = jv.toStyledString(); - CHECK_EQUAL(jv_string, j1); + CHECK(j1 == jv_string); } -TEST(SetJson) { +TEST_CASE( "SetJson", "[libopenshot][color]" ) { const std::string json_input = R"json( { "red": { "Points": [ { "co": { "X": 1.0, "Y": 0.0 }, "interpolation": 0 } ] }, @@ -175,12 +180,10 @@ TEST(SetJson) { } )json"; openshot::Color c; - CHECK_THROW(c.SetJson("}{"), openshot::InvalidJSON); + CHECK_THROWS_AS(c.SetJson("}{"), openshot::InvalidJSON); c.SetJson(json_input); - CHECK_CLOSE(0, c.red.GetLong(10), 0.01); - CHECK_CLOSE(128, c.green.GetLong(10), 0.01); - CHECK_CLOSE(64, c.blue.GetLong(10), 0.01); - CHECK_CLOSE(192, c.alpha.GetLong(10), 0.01); + CHECK(c.red.GetLong(10) == Approx(0).margin(0.01)); + CHECK(c.green.GetLong(10) == Approx(128).margin(0.01)); + CHECK(c.blue.GetLong(10) == Approx(64).margin(0.01)); + CHECK(c.alpha.GetLong(10) == Approx(192).margin(0.01)); } - -} // SUITE diff --git a/tests/Coordinate_Tests.cpp b/tests/Coordinate.cpp similarity index 71% rename from tests/Coordinate_Tests.cpp rename to tests/Coordinate.cpp index 7dd5886c1..8fe075750 100644 --- a/tests/Coordinate_Tests.cpp +++ b/tests/Coordinate.cpp @@ -28,43 +28,39 @@ * along with OpenShot Library. If not, see . 
*/ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Coordinate.h" #include "Exceptions.h" using namespace openshot; -SUITE(Coordinate) -{ - -TEST(Default_Constructor) +TEST_CASE( "default constructor", "[libopenshot][coordinate]" ) { // Create an empty coordinate Coordinate c1; - CHECK_CLOSE(0.0f, c1.X, 0.00001); - CHECK_CLOSE(0.0f, c1.Y, 0.00001); + CHECK(c1.X == Approx(0.0f).margin(0.00001)); + CHECK(c1.Y == Approx(0.0f).margin(0.00001)); } -TEST(X_Y_Constructor) +TEST_CASE( "XY constructor", "[libopenshot][coordinate]" ) { // Create an empty coordinate Coordinate c1(2,8); - CHECK_CLOSE(2.0f, c1.X, 0.00001); - CHECK_CLOSE(8.0f, c1.Y, 0.00001); + CHECK(c1.X == Approx(2.0f).margin(0.00001)); + CHECK(c1.Y == Approx(8.0f).margin(0.00001)); } -TEST(Pair_Constructor) +TEST_CASE( "std::pair constructor", "[libopenshot][coordinate]" ) { Coordinate c1(std::pair(12, 10)); - CHECK_CLOSE(12.0f, c1.X, 0.00001); - CHECK_CLOSE(10.0f, c1.Y, 0.00001); + CHECK(c1.X == Approx(12.0f).margin(0.00001)); + CHECK(c1.Y == Approx(10.0f).margin(0.00001)); } -TEST(Json) +TEST_CASE( "Json", "[libopenshot][coordinate]" ) { openshot::Coordinate c(100, 200); openshot::Coordinate c1; @@ -73,14 +69,14 @@ TEST(Json) // Check that JSON produced is identical auto j = c.Json(); auto j1 = c1.Json(); - CHECK_EQUAL(j, j1); + CHECK(j1 == j); // Check Json::Value representation auto jv = c.JsonValue(); auto jv_string = jv.toStyledString(); - CHECK_EQUAL(jv_string, j1); + CHECK(j1 == jv_string); } -TEST(SetJson) { +TEST_CASE( "SetJson", "[libopenshot][coordinate]" ) { // Construct our input Json representation const std::string json_input = R"json( { @@ -89,11 +85,9 @@ TEST(SetJson) { } )json"; openshot::Coordinate c; - CHECK_THROW(c.SetJson("}{"), openshot::InvalidJSON); + CHECK_THROWS_AS(c.SetJson("}{"), openshot::InvalidJSON); // Check that values set via SetJson() are correct c.SetJson(json_input); - CHECK_CLOSE(100.0, 
c.X, 0.01); - CHECK_CLOSE(50.0, c.Y, 0.01); + CHECK(c.X == Approx(100.0).margin(0.01)); + CHECK(c.Y == Approx(50.0).margin(0.01)); } - -} // SUITE diff --git a/tests/DummyReader_Tests.cpp b/tests/DummyReader.cpp similarity index 57% rename from tests/DummyReader_Tests.cpp rename to tests/DummyReader.cpp index b8a86d02e..fc832624e 100644 --- a/tests/DummyReader_Tests.cpp +++ b/tests/DummyReader.cpp @@ -30,9 +30,7 @@ #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include #include "DummyReader.h" #include "Exceptions.h" @@ -40,62 +38,64 @@ #include "Fraction.h" #include "Frame.h" -using namespace std; -using namespace openshot; - -TEST (DummyReader_Basic_Constructor) { +TEST_CASE( "Default constructor", "[libopenshot][dummyreader]" ) { // Create a default fraction (should be 1/1) openshot::DummyReader r; r.Open(); // Open the reader // Check values - CHECK_EQUAL(1280, r.info.width); - CHECK_EQUAL(768, r.info.height); - CHECK_EQUAL(24, r.info.fps.num); - CHECK_EQUAL(1, r.info.fps.den); - CHECK_EQUAL(44100, r.info.sample_rate); - CHECK_EQUAL(2, r.info.channels); - CHECK_EQUAL(30.0, r.info.duration); + CHECK(r.info.width == 1280); + CHECK(r.info.height == 768); + CHECK(r.info.fps.num == 24); + CHECK(r.info.fps.den == 1); + CHECK(r.info.sample_rate == 44100); + CHECK(r.info.channels == 2); + CHECK(r.info.duration == 30.0); + + CHECK(r.Name() == "DummyReader"); + + auto cache = r.GetCache(); + CHECK(cache == nullptr); } -TEST (DummyReader_Constructor) { +TEST_CASE( "Constructor", "[libopenshot][dummyreader]" ) { // Create a default fraction (should be 1/1) openshot::DummyReader r(openshot::Fraction(30, 1), 1920, 1080, 44100, 2, 60.0); r.Open(); // Open the reader // Check values - CHECK_EQUAL(1920, r.info.width); - CHECK_EQUAL(1080, r.info.height); - CHECK_EQUAL(30, r.info.fps.num); - CHECK_EQUAL(1, r.info.fps.den); - CHECK_EQUAL(44100, r.info.sample_rate); - CHECK_EQUAL(2, r.info.channels); - 
CHECK_EQUAL(60.0, r.info.duration); + CHECK(r.info.width == 1920); + CHECK(r.info.height == 1080); + CHECK(r.info.fps.num == 30); + CHECK(r.info.fps.den == 1); + CHECK(r.info.sample_rate == 44100); + CHECK(r.info.channels == 2); + CHECK(r.info.duration == 60.0); } -TEST (DummyReader_Blank_Frame) { +TEST_CASE( "Blank_Frame", "[libopenshot][dummyreader]" ) { // Create a default fraction (should be 1/1) openshot::DummyReader r(openshot::Fraction(30, 1), 1920, 1080, 44100, 2, 30.0); r.Open(); // Open the reader // Get a blank frame (because we have not passed a Cache object (full of Frame objects) to the constructor // Check values - CHECK_EQUAL(1, r.GetFrame(1)->number); - CHECK_EQUAL(1, r.GetFrame(1)->GetPixels(700)[700] == 0); // black pixel - CHECK_EQUAL(1, r.GetFrame(1)->GetPixels(701)[701] == 0); // black pixel + CHECK(r.GetFrame(1)->number == 1); + CHECK(r.GetFrame(1)->GetPixels(700)[700] == 0); // black pixel + CHECK(r.GetFrame(1)->GetPixels(701)[701] == 0); // black pixel } -TEST (DummyReader_Fake_Frame) { +TEST_CASE( "Fake_Frame", "[libopenshot][dummyreader]" ) { // Create cache object to hold test frames - CacheMemory cache; + openshot::CacheMemory cache; // Let's create some test frames for (int64_t frame_number = 1; frame_number <= 30; frame_number++) { // Create blank frame (with specific frame #, samples, and channels) // Sample count should be 44100 / 30 fps = 1470 samples per frame int sample_count = 1470; - std::shared_ptr f(new openshot::Frame(frame_number, sample_count, 2)); + auto f = std::make_shared(frame_number, sample_count, 2); // Create test samples with incrementing value float *audio_buffer = new float[sample_count]; @@ -110,6 +110,8 @@ TEST (DummyReader_Fake_Frame) { // Add test frame to dummy reader cache.Add(f); + + delete[] audio_buffer; } // Create a default fraction (should be 1/1) @@ -117,26 +119,26 @@ TEST (DummyReader_Fake_Frame) { r.Open(); // Open the reader // Verify our artificial audio sample data is correct - CHECK_EQUAL(1, 
r.GetFrame(1)->number); - CHECK_EQUAL(1, r.GetFrame(1)->GetAudioSamples(0)[0]); - CHECK_CLOSE(1.00068033, r.GetFrame(1)->GetAudioSamples(0)[1], 0.00001); - CHECK_CLOSE(1.00136054, r.GetFrame(1)->GetAudioSamples(0)[2], 0.00001); - CHECK_EQUAL(2, r.GetFrame(2)->GetAudioSamples(0)[0]); - CHECK_CLOSE(2.00068033, r.GetFrame(2)->GetAudioSamples(0)[1], 0.00001); - CHECK_CLOSE(2.00136054, r.GetFrame(2)->GetAudioSamples(0)[2], 0.00001); + CHECK(r.GetFrame(1)->number == 1); + CHECK(r.GetFrame(1)->GetAudioSamples(0)[0] == 1); + CHECK(r.GetFrame(1)->GetAudioSamples(0)[1] == Approx(1.00068033).margin(0.00001)); + CHECK(r.GetFrame(1)->GetAudioSamples(0)[2] == Approx(1.00136054).margin(0.00001)); + CHECK(r.GetFrame(2)->GetAudioSamples(0)[0] == 2); + CHECK(r.GetFrame(2)->GetAudioSamples(0)[1] == Approx(2.00068033).margin(0.00001)); + CHECK(r.GetFrame(2)->GetAudioSamples(0)[2] == Approx(2.00136054).margin(0.00001)); // Clean up cache.Clear(); r.Close(); } -TEST (DummyReader_Invalid_Fake_Frame) { +TEST_CASE( "Invalid_Fake_Frame", "[libopenshot][dummyreader]" ) { // Create fake frames (with specific frame #, samples, and channels) - std::shared_ptr f1(new openshot::Frame(1, 1470, 2)); - std::shared_ptr f2(new openshot::Frame(2, 1470, 2)); + auto f1 = std::make_shared(1, 1470, 2); + auto f2 = std::make_shared(2, 1470, 2); // Add test frames to cache object - CacheMemory cache; + openshot::CacheMemory cache; cache.Add(f1); cache.Add(f2); @@ -145,11 +147,40 @@ TEST (DummyReader_Invalid_Fake_Frame) { r.Open(); // Verify exception - CHECK_EQUAL(1, r.GetFrame(1)->number); - CHECK_EQUAL(2, r.GetFrame(2)->number); - CHECK_THROW(r.GetFrame(3)->number, InvalidFile); + CHECK(r.GetFrame(1)->number == 1); + CHECK(r.GetFrame(2)->number == 2); + CHECK_THROWS_AS(r.GetFrame(3)->number, openshot::InvalidFile); // Clean up cache.Clear(); r.Close(); } + +TEST_CASE( "Json", "[libopenshot][dummyreader]") { + openshot::DummyReader r1; + openshot::DummyReader r2(openshot::Fraction(24, 1), 1280, 768, 44100, 
2, 30.0); + auto json1 = r1.Json(); + auto json2 = r2.JsonValue(); + auto json_string2 = json2.toStyledString(); + CHECK(json_string2 == json1); +} + +TEST_CASE( "SetJson", "[libopenshot][dummyreader]") { + openshot::DummyReader r1; + std::stringstream json_stream; + json_stream << R"json( + { + "width": 1920, + "height": 1080, + "fps": { "num": 15, "den": 1 }, + "duration": 15.0 + } + )json"; + + r1.SetJson(json_stream.str()); + CHECK(r1.info.width == 1920); + CHECK(r1.info.height == 1080); + CHECK(r1.info.fps.num == 15); + CHECK(r1.info.fps.den == 1); + CHECK(r1.info.duration == 15.0); +} diff --git a/tests/FFmpegReader_Tests.cpp b/tests/FFmpegReader.cpp similarity index 63% rename from tests/FFmpegReader_Tests.cpp rename to tests/FFmpegReader.cpp index f5dc44350..217d601ca 100644 --- a/tests/FFmpegReader_Tests.cpp +++ b/tests/FFmpegReader.cpp @@ -31,9 +31,8 @@ #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "FFmpegReader.h" #include "Exceptions.h" #include "Frame.h" @@ -43,16 +42,13 @@ using namespace std; using namespace openshot; -SUITE(FFmpegReader) -{ - -TEST(Invalid_Path) +TEST_CASE( "Invalid_Path", "[libopenshot][ffmpegreader]" ) { // Check invalid path - CHECK_THROW(FFmpegReader(""), InvalidFile); + CHECK_THROWS_AS(FFmpegReader(""), InvalidFile); } -TEST(GetFrame_Before_Opening) +TEST_CASE( "GetFrame_Before_Opening", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -60,10 +56,10 @@ TEST(GetFrame_Before_Opening) FFmpegReader r(path.str()); // Check invalid path - CHECK_THROW(r.GetFrame(1), ReaderClosed); + CHECK_THROWS_AS(r.GetFrame(1), ReaderClosed); } -TEST(Check_Audio_File) +TEST_CASE( "Check_Audio_File", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -78,22 +74,22 @@ TEST(Check_Audio_File) float *samples = f->GetAudioSamples(0); // Check audio properties - CHECK_EQUAL(2, f->GetAudioChannelsCount()); 
- CHECK_EQUAL(332, f->GetAudioSamplesCount()); + CHECK(f->GetAudioChannelsCount() == 2); + CHECK(f->GetAudioSamplesCount() == 332); // Check actual sample values (to be sure the waveform is correct) - CHECK_CLOSE(0.0f, samples[0], 0.00001); - CHECK_CLOSE(0.0f, samples[50], 0.00001); - CHECK_CLOSE(0.0f, samples[100], 0.00001); - CHECK_CLOSE(0.0f, samples[200], 0.00001); - CHECK_CLOSE(0.16406f, samples[230], 0.00001); - CHECK_CLOSE(-0.06250f, samples[300], 0.00001); + CHECK(samples[0] == Approx(0.0f).margin(0.00001)); + CHECK(samples[50] == Approx(0.0f).margin(0.00001)); + CHECK(samples[100] == Approx(0.0f).margin(0.00001)); + CHECK(samples[200] == Approx(0.0f).margin(0.00001)); + CHECK(samples[230] == Approx(0.16406f).margin(0.00001)); + CHECK(samples[300] == Approx(-0.06250f).margin(0.00001)); // Close reader r.Close(); } -TEST(Check_Video_File) +TEST_CASE( "Check_Video_File", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -109,14 +105,14 @@ TEST(Check_Video_File) int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_CLOSE(21, (int)pixels[pixel_index], 5); - CHECK_CLOSE(191, (int)pixels[pixel_index + 1], 5); - CHECK_CLOSE(0, (int)pixels[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + CHECK((int)pixels[pixel_index] == Approx(21).margin(5)); + CHECK((int)pixels[pixel_index + 1] == Approx(191).margin(5)); + CHECK((int)pixels[pixel_index + 2] == Approx(0).margin(5)); + CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5)); // Check pixel function - CHECK_EQUAL(true, f->CheckPixel(10, 112, 21, 191, 0, 255, 5)); - CHECK_EQUAL(false, f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); + CHECK(f->CheckPixel(10, 112, 21, 191, 0, 255, 5) == true); + CHECK_FALSE(f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); // Get frame 1 f = r.GetFrame(2); @@ -126,20 +122,20 @@ TEST(Check_Video_File) pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties 
on scanline 10, pixel 112 - CHECK_CLOSE(0, (int)pixels[pixel_index], 5); - CHECK_CLOSE(96, (int)pixels[pixel_index + 1], 5); - CHECK_CLOSE(188, (int)pixels[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + CHECK((int)pixels[pixel_index] == Approx(0).margin(5)); + CHECK((int)pixels[pixel_index + 1] == Approx(96).margin(5)); + CHECK((int)pixels[pixel_index + 2] == Approx(188).margin(5)); + CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5)); // Check pixel function - CHECK_EQUAL(true, f->CheckPixel(10, 112, 0, 96, 188, 255, 5)); - CHECK_EQUAL(false, f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); + CHECK(f->CheckPixel(10, 112, 0, 96, 188, 255, 5) == true); + CHECK_FALSE(f->CheckPixel(10, 112, 0, 0, 0, 0, 5)); // Close reader r.Close(); } -TEST(Seek) +TEST_CASE( "Seek", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -149,54 +145,54 @@ TEST(Seek) // Get frame std::shared_ptr f = r.GetFrame(1); - CHECK_EQUAL(1, f->number); + CHECK(f->number == 1); // Get frame f = r.GetFrame(300); - CHECK_EQUAL(300, f->number); + CHECK(f->number == 300); // Get frame f = r.GetFrame(301); - CHECK_EQUAL(301, f->number); + CHECK(f->number == 301); // Get frame f = r.GetFrame(315); - CHECK_EQUAL(315, f->number); + CHECK(f->number == 315); // Get frame f = r.GetFrame(275); - CHECK_EQUAL(275, f->number); + CHECK(f->number == 275); // Get frame f = r.GetFrame(270); - CHECK_EQUAL(270, f->number); + CHECK(f->number == 270); // Get frame f = r.GetFrame(500); - CHECK_EQUAL(500, f->number); + CHECK(f->number == 500); // Get frame f = r.GetFrame(100); - CHECK_EQUAL(100, f->number); + CHECK(f->number == 100); // Get frame f = r.GetFrame(600); - CHECK_EQUAL(600, f->number); + CHECK(f->number == 600); // Get frame f = r.GetFrame(1); - CHECK_EQUAL(1, f->number); + CHECK(f->number == 1); // Get frame f = r.GetFrame(700); - CHECK_EQUAL(700, f->number); + CHECK(f->number == 700); // Close reader r.Close(); } -TEST(Frame_Rate) +TEST_CASE( "Frame_Rate", 
"[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -206,13 +202,13 @@ TEST(Frame_Rate) // Verify detected frame rate openshot::Fraction rate = r.info.fps; - CHECK_EQUAL(24, rate.num); - CHECK_EQUAL(1, rate.den); + CHECK(rate.num == 24); + CHECK(rate.den == 1); r.Close(); } -TEST(Multiple_Open_and_Close) +TEST_CASE( "Multiple_Open_and_Close", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -222,7 +218,7 @@ TEST(Multiple_Open_and_Close) // Get frame that requires a seek std::shared_ptr f = r.GetFrame(1200); - CHECK_EQUAL(1200, f->number); + CHECK(f->number == 1200); // Close and Re-open the reader r.Close(); @@ -230,9 +226,9 @@ TEST(Multiple_Open_and_Close) // Get frame f = r.GetFrame(1); - CHECK_EQUAL(1, f->number); + CHECK(f->number == 1); f = r.GetFrame(250); - CHECK_EQUAL(250, f->number); + CHECK(f->number == 250); // Close and Re-open the reader r.Close(); @@ -240,15 +236,15 @@ TEST(Multiple_Open_and_Close) // Get frame f = r.GetFrame(750); - CHECK_EQUAL(750, f->number); + CHECK(f->number == 750); f = r.GetFrame(1000); - CHECK_EQUAL(1000, f->number); + CHECK(f->number == 1000); // Close reader r.Close(); } -TEST(Verify_Parent_Timeline) +TEST_CASE( "verify parent Timeline", "[libopenshot][ffmpegreader]" ) { // Create a reader stringstream path; @@ -257,8 +253,8 @@ TEST(Verify_Parent_Timeline) r.Open(); // Check size of frame image - CHECK_EQUAL(r.GetFrame(1)->GetImage()->width(), 1280); - CHECK_EQUAL(r.GetFrame(1)->GetImage()->height(), 720); + CHECK(r.GetFrame(1)->GetImage()->width() == 1280); + CHECK(r.GetFrame(1)->GetImage()->height() == 720); r.GetFrame(1)->GetImage()->save("reader-1.png", "PNG"); // Create a Clip associated with this reader @@ -266,16 +262,14 @@ TEST(Verify_Parent_Timeline) c1.Open(); // Check size of frame image (should still be the same) - CHECK_EQUAL(r.GetFrame(1)->GetImage()->width(), 1280); - CHECK_EQUAL(r.GetFrame(1)->GetImage()->height(), 720); + 
CHECK(r.GetFrame(1)->GetImage()->width() == 1280); + CHECK(r.GetFrame(1)->GetImage()->height() == 720); // Create Timeline Timeline t1(640, 480, Fraction(30,1), 44100, 2, LAYOUT_STEREO); t1.AddClip(&c1); // Check size of frame image (it should now match the parent timeline) - CHECK_EQUAL(r.GetFrame(1)->GetImage()->width(), 640); - CHECK_EQUAL(r.GetFrame(1)->GetImage()->height(), 360); + CHECK(r.GetFrame(1)->GetImage()->width() == 640); + CHECK(r.GetFrame(1)->GetImage()->height() == 360); } - -} // SUITE(FFmpegReader) diff --git a/tests/FFmpegWriter_Tests.cpp b/tests/FFmpegWriter.cpp similarity index 76% rename from tests/FFmpegWriter_Tests.cpp rename to tests/FFmpegWriter.cpp index 0160ac929..059bbb4de 100644 --- a/tests/FFmpegWriter_Tests.cpp +++ b/tests/FFmpegWriter.cpp @@ -31,9 +31,8 @@ #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "FFmpegWriter.h" #include "Exceptions.h" #include "FFmpegReader.h" @@ -43,8 +42,7 @@ using namespace std; using namespace openshot; -SUITE(FFMpegWriter) { -TEST(Webm) +TEST_CASE( "Webm", "[libopenshot][ffmpegwriter]" ) { // Reader stringstream path; @@ -73,9 +71,9 @@ TEST(Webm) r1.Open(); // Verify various settings on new MP4 - CHECK_EQUAL(2, r1.GetFrame(1)->GetAudioChannelsCount()); - CHECK_EQUAL(24, r1.info.fps.num); - CHECK_EQUAL(1, r1.info.fps.den); + CHECK(r1.GetFrame(1)->GetAudioChannelsCount() == 2); + CHECK(r1.info.fps.num == 24); + CHECK(r1.info.fps.den == 1); // Get a specific frame std::shared_ptr f = r1.GetFrame(8); @@ -85,13 +83,13 @@ TEST(Webm) int pixel_index = 112 * 4; // pixel 112 (4 bytes per pixel) // Check image properties on scanline 10, pixel 112 - CHECK_CLOSE(23, (int)pixels[pixel_index], 5); - CHECK_CLOSE(23, (int)pixels[pixel_index + 1], 5); - CHECK_CLOSE(23, (int)pixels[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + CHECK((int)pixels[pixel_index] == Approx(23).margin(5)); + 
CHECK((int)pixels[pixel_index + 1] == Approx(23).margin(5)); + CHECK((int)pixels[pixel_index + 2] == Approx(23).margin(5)); + CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5)); } -TEST(Options_Overloads) +TEST_CASE( "Options_Overloads", "[libopenshot][ffmpegwriter]" ) { // Reader stringstream path; @@ -120,16 +118,14 @@ TEST(Options_Overloads) r1.Open(); // Verify implied settings - CHECK_EQUAL(true, r1.info.has_audio); - CHECK_EQUAL(true, r1.info.has_video); + CHECK(r1.info.has_audio == true); + CHECK(r1.info.has_video == true); - CHECK_EQUAL(2, r1.GetFrame(1)->GetAudioChannelsCount()); - CHECK_EQUAL(LAYOUT_STEREO, r1.info.channel_layout); + CHECK(r1.GetFrame(1)->GetAudioChannelsCount() == 2); + CHECK(r1.info.channel_layout == LAYOUT_STEREO); - CHECK_EQUAL(1, r1.info.pixel_ratio.num); - CHECK_EQUAL(1, r1.info.pixel_ratio.den); - CHECK_EQUAL(false, r1.info.interlaced_frame); - CHECK_EQUAL(true, r1.info.top_field_first); + CHECK(r1.info.pixel_ratio.num == 1); + CHECK(r1.info.pixel_ratio.den == 1); + CHECK_FALSE(r1.info.interlaced_frame); + CHECK(r1.info.top_field_first == true); } - -} // SUITE() diff --git a/tests/Fraction_Tests.cpp b/tests/Fraction.cpp similarity index 53% rename from tests/Fraction_Tests.cpp rename to tests/Fraction.cpp index 760a83807..8736abaf8 100644 --- a/tests/Fraction_Tests.cpp +++ b/tests/Fraction.cpp @@ -28,129 +28,123 @@ * along with OpenShot Library. If not, see . 
*/ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Fraction.h" using namespace std; using namespace openshot; -SUITE(Fraction) -{ - -TEST(Constructors) +TEST_CASE( "Constructors", "[libopenshot][fraction]" ) { // Create a default fraction (should be 1/1) Fraction f1; // Check default fraction - CHECK_EQUAL(1, f1.num); - CHECK_EQUAL(1, f1.den); - CHECK_CLOSE(1.0f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.0f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 1); + CHECK(f1.den == 1); + CHECK(f1.ToFloat() == Approx(1.0f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.0f).margin(0.00001)); // reduce fraction f1.Reduce(); // Check the reduced fraction - CHECK_EQUAL(1, f1.num); - CHECK_EQUAL(1, f1.den); - CHECK_CLOSE(1.0f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.0f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 1); + CHECK(f1.den == 1); + CHECK(f1.ToFloat() == Approx(1.0f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.0f).margin(0.00001)); } -TEST(Alt_Constructors) +TEST_CASE( "Alt_Constructors", "[libopenshot][fraction]" ) { // Use the delegating constructor for std::pair std::pair args{24, 1}; Fraction f1(args); - CHECK_EQUAL(24, f1.num); - CHECK_EQUAL(1, f1.den); - CHECK_CLOSE(24.0f, f1.ToFloat(), 0.00001); + CHECK(f1.num == 24); + CHECK(f1.den == 1); + CHECK(f1.ToFloat() == Approx(24.0f).margin(0.00001)); // Use the delegating constructor for std::vector std::vector v{30000, 1001}; Fraction f2(v); - CHECK_CLOSE(30000.0/1001.0, f2.ToFloat(), 0.00001); + CHECK(f2.ToFloat() == Approx(30000.0/1001.0).margin(0.00001)); // Use the delegating constructor for std::map std::map dict; dict.insert({"num", 24000}); dict.insert({"den", 1001}); Fraction f3(dict); - CHECK_EQUAL(1001, f3.den); - CHECK_EQUAL(24000, f3.num); - CHECK_CLOSE(1001.0/24000.0, f3.Reciprocal().ToFloat(), 0.00001); + CHECK(f3.den == 1001); + CHECK(f3.num == 24000); + CHECK(f3.Reciprocal().ToFloat() == 
Approx(1001.0/24000.0).margin(0.00001)); } -TEST(WxH_640_480) +TEST_CASE( "WxH_640_480", "[libopenshot][fraction]" ) { // Create fraction Fraction f1(640, 480); // Check fraction - CHECK_EQUAL(640, f1.num); - CHECK_EQUAL(480, f1.den); - CHECK_CLOSE(1.33333f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.33333f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 640); + CHECK(f1.den == 480); + CHECK(f1.ToFloat() == Approx(1.33333f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.33333f).margin(0.00001)); // reduce fraction f1.Reduce(); // Check the reduced fraction - CHECK_EQUAL(4, f1.num); - CHECK_EQUAL(3, f1.den); - CHECK_CLOSE(1.33333f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.33333f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 4); + CHECK(f1.den == 3); + CHECK(f1.ToFloat() == Approx(1.33333f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.33333f).margin(0.00001)); } -TEST(WxH_1280_720) +TEST_CASE( "WxH_1280_720", "[libopenshot][fraction]" ) { // Create fraction Fraction f1(1280, 720); // Check fraction - CHECK_EQUAL(1280, f1.num); - CHECK_EQUAL(720, f1.den); - CHECK_CLOSE(1.77777f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.77777f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 1280); + CHECK(f1.den == 720); + CHECK(f1.ToFloat() == Approx(1.77777f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.77777f).margin(0.00001)); // reduce fraction f1.Reduce(); // Check the reduced fraction - CHECK_EQUAL(16, f1.num); - CHECK_EQUAL(9, f1.den); - CHECK_CLOSE(1.77777f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.77777f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 16); + CHECK(f1.den == 9); + CHECK(f1.ToFloat() == Approx(1.77777f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.77777f).margin(0.00001)); } -TEST(Reciprocal) +TEST_CASE( "Reciprocal", "[libopenshot][fraction]" ) { // Create fraction Fraction f1(1280, 720); // Check fraction - CHECK_EQUAL(1280, f1.num); - CHECK_EQUAL(720, f1.den); - CHECK_CLOSE(1.77777f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.77777f, f1.ToDouble(), 
0.00001); + CHECK(f1.num == 1280); + CHECK(f1.den == 720); + CHECK(f1.ToFloat() == Approx(1.77777f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.77777f).margin(0.00001)); // Get the reciprocal of the fraction (i.e. flip the fraction) Fraction f2 = f1.Reciprocal(); // Check the reduced fraction - CHECK_EQUAL(720, f2.num); - CHECK_EQUAL(1280, f2.den); - CHECK_CLOSE(0.5625f, f2.ToFloat(), 0.00001); - CHECK_CLOSE(0.5625f, f2.ToDouble(), 0.00001); + CHECK(f2.num == 720); + CHECK(f2.den == 1280); + CHECK(f2.ToFloat() == Approx(0.5625f).margin(0.00001)); + CHECK(f2.ToDouble() == Approx(0.5625f).margin(0.00001)); // Re-Check the original fraction (to be sure it hasn't changed) - CHECK_EQUAL(1280, f1.num); - CHECK_EQUAL(720, f1.den); - CHECK_CLOSE(1.77777f, f1.ToFloat(), 0.00001); - CHECK_CLOSE(1.77777f, f1.ToDouble(), 0.00001); + CHECK(f1.num == 1280); + CHECK(f1.den == 720); + CHECK(f1.ToFloat() == Approx(1.77777f).margin(0.00001)); + CHECK(f1.ToDouble() == Approx(1.77777f).margin(0.00001)); } - -} // SUITE diff --git a/tests/Frame_Tests.cpp b/tests/Frame.cpp similarity index 57% rename from tests/Frame_Tests.cpp rename to tests/Frame.cpp index 9038f8b85..0a780a323 100644 --- a/tests/Frame_Tests.cpp +++ b/tests/Frame.cpp @@ -32,52 +32,52 @@ #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "Frame.h" -#include "Clip.h" -#include "Fraction.h" - #include #ifdef USE_OPENCV -#include +#define int64 opencv_broken_int +#define uint64 opencv_broken_uint +#include +#undef int64 +#undef uint64 #endif -using namespace openshot; +#include -SUITE(Frame_Tests) -{ +#include "Clip.h" +#include "Fraction.h" +#include "Frame.h" -TEST(Default_Constructor) +using namespace openshot; + +TEST_CASE( "Default_Constructor", "[libopenshot][frame]" ) { // Create a "blank" default Frame std::shared_ptr f1(new Frame()); - CHECK(f1 != nullptr); // Test aborts here if we didn't get a Frame + REQUIRE(f1 != 
nullptr); // Test aborts here if we didn't get a Frame // Check basic default parameters - CHECK_EQUAL(1, f1->GetHeight()); - CHECK_EQUAL(1, f1->GetWidth()); - CHECK_EQUAL(44100, f1->SampleRate()); - CHECK_EQUAL(2, f1->GetAudioChannelsCount()); + CHECK(f1->GetHeight() == 1); + CHECK(f1->GetWidth() == 1); + CHECK(f1->SampleRate() == 44100); + CHECK(f1->GetAudioChannelsCount() == 2); // Should be false until we load or create contents - CHECK_EQUAL(false, f1->has_image_data); - CHECK_EQUAL(false, f1->has_audio_data); + CHECK(f1->has_image_data == false); + CHECK(f1->has_audio_data == false); // Calling GetImage() paints a blank frame, by default std::shared_ptr i1 = f1->GetImage(); - CHECK(i1 != nullptr); + REQUIRE(i1 != nullptr); - CHECK_EQUAL(true,f1->has_image_data); - CHECK_EQUAL(false,f1->has_audio_data); + CHECK(f1->has_image_data == true); + CHECK(f1->has_audio_data == false); } -TEST(Data_Access) +TEST_CASE( "Data_Access", "[libopenshot][frame]" ) { // Create a video clip std::stringstream path; @@ -88,15 +88,15 @@ TEST(Data_Access) // Get first frame std::shared_ptr f1 = c1.GetFrame(1); - CHECK(f1 != nullptr); + REQUIRE(f1 != nullptr); - CHECK_EQUAL(1, f1->number); - CHECK_EQUAL(1280, f1->GetWidth()); - CHECK_EQUAL(720, f1->GetHeight()); + CHECK(f1->number == 1); + CHECK(f1->GetWidth() == 1280); + CHECK(f1->GetHeight() == 720); } -TEST(AddImage_QImage) +TEST_CASE( "AddImage_QImage", "[libopenshot][frame]" ) { // Create a "blank" default Frame std::shared_ptr f1(new Frame()); @@ -104,21 +104,21 @@ TEST(AddImage_QImage) // Load an image std::stringstream path; path << TEST_MEDIA_PATH << "front.png"; - std::shared_ptr i1(new QImage(QString::fromStdString(path.str()))) ; + auto i1 = std::make_shared(QString::fromStdString(path.str())); - CHECK(f1 != nullptr); // Test aborts here if we didn't get a Frame - CHECK_EQUAL(false, i1->isNull()); + REQUIRE(f1 != nullptr); // Test aborts here if we didn't get a Frame + CHECK(i1->isNull() == false); f1->AddImage(i1); // 
Check loaded image parameters - CHECK_EQUAL(i1->height(), f1->GetHeight()); - CHECK_EQUAL(i1->width(), f1->GetWidth()); - CHECK_EQUAL(true, f1->has_image_data); + CHECK(f1->GetHeight() == i1->height()); + CHECK(f1->GetWidth() == i1->width()); + CHECK(f1->has_image_data == true); } -TEST(Copy_Constructor) +TEST_CASE( "Copy_Constructor", "[libopenshot][frame]" ) { // Create a dummy Frame openshot::Frame f1(1, 800, 600, "#000000"); @@ -126,38 +126,38 @@ TEST(Copy_Constructor) // Load an image std::stringstream path; path << TEST_MEDIA_PATH << "front.png"; - std::shared_ptr i1( new QImage(QString::fromStdString(path.str())) ); + auto i1 = std::make_shared(QString::fromStdString(path.str())); - CHECK_EQUAL(false, i1->isNull()); + CHECK(i1->isNull() == false); // Add image to f1, then copy f1 to f2 f1.AddImage(i1); Frame f2 = f1; - CHECK_EQUAL(f1.GetHeight(), f2.GetHeight()); - CHECK_EQUAL(f1.GetWidth(), f2.GetWidth()); + CHECK(f1.GetHeight() == f2.GetHeight()); + CHECK(f1.GetWidth() == f2.GetWidth()); - CHECK_EQUAL(f1.has_image_data, f2.has_image_data); - CHECK_EQUAL(f1.has_audio_data, f2.has_audio_data); + CHECK(f1.has_image_data == f2.has_image_data); + CHECK(f1.has_audio_data == f2.has_audio_data); Fraction par1 = f1.GetPixelRatio(); Fraction par2 = f2.GetPixelRatio(); - CHECK_EQUAL(par1.num, par2.num); - CHECK_EQUAL(par1.den, par2.den); + CHECK(par1.num == par2.num); + CHECK(par1.den == par2.den); - CHECK_EQUAL(f1.SampleRate(), f2.SampleRate()); - CHECK_EQUAL(f1.GetAudioChannelsCount(), f2.GetAudioChannelsCount()); - CHECK_EQUAL(f1.ChannelsLayout(), f2.ChannelsLayout()); + CHECK(f1.SampleRate() == f2.SampleRate()); + CHECK(f1.GetAudioChannelsCount() == f2.GetAudioChannelsCount()); + CHECK(f1.ChannelsLayout() == f2.ChannelsLayout()); - CHECK_EQUAL(f1.GetBytes(), f2.GetBytes()); - CHECK_EQUAL(f1.GetAudioSamplesCount(), f2.GetAudioSamplesCount()); + CHECK(f1.GetBytes() == f2.GetBytes()); + CHECK(f1.GetAudioSamplesCount() == f2.GetAudioSamplesCount()); } #ifdef 
USE_OPENCV -TEST(Convert_Image) +TEST_CASE( "Convert_Image", "[libopenshot][opencv][frame]" ) { // Create a video clip std::stringstream path; @@ -171,13 +171,11 @@ TEST(Convert_Image) // Get first Mat image cv::Mat cvimage = f1->GetImageCV(); - CHECK(!cvimage.empty()); + CHECK_FALSE(cvimage.empty()); - CHECK_EQUAL(1, f1->number); - CHECK_EQUAL(f1->GetWidth(), cvimage.cols); - CHECK_EQUAL(f1->GetHeight(), cvimage.rows); - CHECK_EQUAL(3, cvimage.channels()); + CHECK(f1->number == 1); + CHECK(f1->GetWidth() == cvimage.cols); + CHECK(f1->GetHeight() == cvimage.rows); + CHECK(cvimage.channels() == 3); } #endif - -} // SUITE(Frame_Tests) diff --git a/tests/FrameMapper_Tests.cpp b/tests/FrameMapper.cpp similarity index 57% rename from tests/FrameMapper_Tests.cpp rename to tests/FrameMapper.cpp index d586c30c2..20ca76e81 100644 --- a/tests/FrameMapper_Tests.cpp +++ b/tests/FrameMapper.cpp @@ -28,9 +28,8 @@ * along with OpenShot Library. If not, see . */ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "CacheMemory.h" #include "Clip.h" #include "DummyReader.h" @@ -40,36 +39,33 @@ #include "FrameMapper.h" #include "Timeline.h" -using namespace std; using namespace openshot; -SUITE(FrameMapper) { - -TEST(NoOp_GetMappedFrame) +TEST_CASE( "NoOp_GetMappedFrame", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0); // Create mapping between 24 fps and 24 fps without pulldown FrameMapper mapping(&r, Fraction(24, 1), PULLDOWN_NONE, 22000, 2, LAYOUT_STEREO); - CHECK_EQUAL("FrameMapper", mapping.Name()); + CHECK(mapping.Name() == "FrameMapper"); // Should find this frame MappedFrame f = mapping.GetMappedFrame(100); - CHECK_EQUAL(100, f.Odd.Frame); - CHECK_EQUAL(100, f.Even.Frame); + CHECK(f.Odd.Frame == 100); + CHECK(f.Even.Frame == 100); // Should return end frame f = mapping.GetMappedFrame(150); - CHECK_EQUAL(120, f.Odd.Frame); - 
CHECK_EQUAL(120, f.Even.Frame); + CHECK(f.Odd.Frame == 120); + CHECK(f.Even.Frame == 120); mapping.Close(); mapping.Reader(nullptr); - CHECK_THROW(mapping.Reader(), ReaderClosed); + CHECK_THROWS_AS(mapping.Reader(), ReaderClosed); } -TEST(Invalid_Frame_Too_Small) +TEST_CASE( "Invalid_Frame_Too_Small", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0); @@ -78,11 +74,11 @@ TEST(Invalid_Frame_Too_Small) FrameMapper mapping(&r, Fraction(30000, 1001), PULLDOWN_CLASSIC, 22000, 2, LAYOUT_STEREO); // Check invalid frame number - CHECK_THROW(mapping.GetMappedFrame(0), OutOfBoundsFrame); + CHECK_THROWS_AS(mapping.GetMappedFrame(0), OutOfBoundsFrame); } -TEST(24_fps_to_30_fps_Pulldown_Classic) +TEST_CASE( "24_fps_to_30_fps_Pulldown_Classic", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0); @@ -93,13 +89,13 @@ TEST(24_fps_to_30_fps_Pulldown_Classic) MappedFrame frame3 = mapping.GetMappedFrame(3); // Check for 3 fields of frame 2 - CHECK_EQUAL(2, frame2.Odd.Frame); - CHECK_EQUAL(2, frame2.Even.Frame); - CHECK_EQUAL(2, frame3.Odd.Frame); - CHECK_EQUAL(3, frame3.Even.Frame); + CHECK(frame2.Odd.Frame == 2); + CHECK(frame2.Even.Frame == 2); + CHECK(frame3.Odd.Frame == 2); + CHECK(frame3.Even.Frame == 3); } -TEST(24_fps_to_30_fps_Pulldown_Advanced) +TEST_CASE( "24_fps_to_30_fps_Pulldown_Advanced", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0); @@ -111,15 +107,15 @@ TEST(24_fps_to_30_fps_Pulldown_Advanced) MappedFrame frame4 = mapping.GetMappedFrame(4); // Check for advanced pulldown (only 1 fake frame) - CHECK_EQUAL(2, frame2.Odd.Frame); - CHECK_EQUAL(2, frame2.Even.Frame); - CHECK_EQUAL(2, frame3.Odd.Frame); - CHECK_EQUAL(3, frame3.Even.Frame); - CHECK_EQUAL(3, frame4.Odd.Frame); - CHECK_EQUAL(3, frame4.Even.Frame); + CHECK(frame2.Odd.Frame == 2); + CHECK(frame2.Even.Frame == 2); + CHECK(frame3.Odd.Frame == 
2); + CHECK(frame3.Even.Frame == 3); + CHECK(frame4.Odd.Frame == 3); + CHECK(frame4.Even.Frame == 3); } -TEST(24_fps_to_30_fps_Pulldown_None) +TEST_CASE( "24_fps_to_30_fps_Pulldown_None", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0); @@ -130,13 +126,13 @@ TEST(24_fps_to_30_fps_Pulldown_None) MappedFrame frame5 = mapping.GetMappedFrame(5); // Check for advanced pulldown (only 1 fake frame) - CHECK_EQUAL(4, frame4.Odd.Frame); - CHECK_EQUAL(4, frame4.Even.Frame); - CHECK_EQUAL(4, frame5.Odd.Frame); - CHECK_EQUAL(4, frame5.Even.Frame); + CHECK(frame4.Odd.Frame == 4); + CHECK(frame4.Even.Frame == 4); + CHECK(frame5.Odd.Frame == 4); + CHECK(frame5.Even.Frame == 4); } -TEST(30_fps_to_24_fps_Pulldown_Classic) +TEST_CASE( "30_fps_to_24_fps_Pulldown_Classic", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0); @@ -148,15 +144,15 @@ TEST(30_fps_to_24_fps_Pulldown_Classic) MappedFrame frame5 = mapping.GetMappedFrame(5); // Check for advanced pulldown (only 1 fake frame) - CHECK_EQUAL(4, frame3.Odd.Frame); - CHECK_EQUAL(3, frame3.Even.Frame); - CHECK_EQUAL(5, frame4.Odd.Frame); - CHECK_EQUAL(4, frame4.Even.Frame); - CHECK_EQUAL(6, frame5.Odd.Frame); - CHECK_EQUAL(6, frame5.Even.Frame); + CHECK(frame3.Odd.Frame == 4); + CHECK(frame3.Even.Frame == 3); + CHECK(frame4.Odd.Frame == 5); + CHECK(frame4.Even.Frame == 4); + CHECK(frame5.Odd.Frame == 6); + CHECK(frame5.Even.Frame == 6); } -TEST(30_fps_to_24_fps_Pulldown_Advanced) +TEST_CASE( "30_fps_to_24_fps_Pulldown_Advanced", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0); @@ -168,15 +164,15 @@ TEST(30_fps_to_24_fps_Pulldown_Advanced) MappedFrame frame4 = mapping.GetMappedFrame(4); // Check for advanced pulldown (only 1 fake frame) - CHECK_EQUAL(2, frame2.Odd.Frame); - CHECK_EQUAL(2, frame2.Even.Frame); - CHECK_EQUAL(4, frame3.Odd.Frame); - CHECK_EQUAL(4, 
frame3.Even.Frame); - CHECK_EQUAL(5, frame4.Odd.Frame); - CHECK_EQUAL(5, frame4.Even.Frame); + CHECK(frame2.Odd.Frame == 2); + CHECK(frame2.Even.Frame == 2); + CHECK(frame3.Odd.Frame == 4); + CHECK(frame3.Even.Frame == 4); + CHECK(frame4.Odd.Frame == 5); + CHECK(frame4.Even.Frame == 5); } -TEST(30_fps_to_24_fps_Pulldown_None) +TEST_CASE( "30_fps_to_24_fps_Pulldown_None", "[libopenshot][framemapper]" ) { // Create a reader DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0); @@ -187,16 +183,16 @@ TEST(30_fps_to_24_fps_Pulldown_None) MappedFrame frame5 = mapping.GetMappedFrame(5); // Check for advanced pulldown (only 1 fake frame) - CHECK_EQUAL(4, frame4.Odd.Frame); - CHECK_EQUAL(4, frame4.Even.Frame); - CHECK_EQUAL(6, frame5.Odd.Frame); - CHECK_EQUAL(6, frame5.Even.Frame); + CHECK(frame4.Odd.Frame == 4); + CHECK(frame4.Even.Frame == 4); + CHECK(frame5.Odd.Frame == 6); + CHECK(frame5.Even.Frame == 6); } -TEST(resample_audio_48000_to_41000) +TEST_CASE( "resample_audio_48000_to_41000", "[libopenshot][framemapper]" ) { // Create a reader: 24 fps, 2 channels, 48000 sample rate - stringstream path; + std::stringstream path; path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4"; FFmpegReader r(path.str()); @@ -205,25 +201,25 @@ TEST(resample_audio_48000_to_41000) map.Open(); // Check details - CHECK_EQUAL(3, map.GetFrame(1)->GetAudioChannelsCount()); - CHECK_EQUAL(1470, map.GetFrame(1)->GetAudioSamplesCount()); - CHECK_EQUAL(1470, map.GetFrame(2)->GetAudioSamplesCount()); - CHECK_EQUAL(1470, map.GetFrame(50)->GetAudioSamplesCount()); + CHECK(map.GetFrame(1)->GetAudioChannelsCount() == 3); + CHECK(map.GetFrame(1)->GetAudioSamplesCount() == 1470); + CHECK(map.GetFrame(2)->GetAudioSamplesCount() == 1470); + CHECK(map.GetFrame(50)->GetAudioSamplesCount() == 1470); // Change mapping data map.ChangeMapping(Fraction(25,1), PULLDOWN_NONE, 22050, 1, LAYOUT_MONO); // Check details - CHECK_EQUAL(1, map.GetFrame(1)->GetAudioChannelsCount()); - CHECK_CLOSE(882, 
map.GetFrame(1)->GetAudioSamplesCount(), 10.0); - CHECK_CLOSE(882, map.GetFrame(2)->GetAudioSamplesCount(), 10.0); - CHECK_CLOSE(882, map.GetFrame(50)->GetAudioSamplesCount(), 10.0); + CHECK(map.GetFrame(1)->GetAudioChannelsCount() == 1); + CHECK(map.GetFrame(1)->GetAudioSamplesCount() == Approx(882).margin(10.0)); + CHECK(map.GetFrame(2)->GetAudioSamplesCount() == Approx(882).margin(10.0)); + CHECK(map.GetFrame(50)->GetAudioSamplesCount() == Approx(882).margin(10.0)); // Close mapper map.Close(); } -TEST(resample_audio_mapper) { +TEST_CASE( "resample_audio_mapper", "[libopenshot][framemapper]" ) { // This test verifies that audio data can be resampled on FrameMapper // instances, even on frame rates that do not divide evenly, and that no audio data is misplaced // or duplicated. We verify this by creating a SIN wave, add those data points to a DummyReader, @@ -232,26 +228,26 @@ TEST(resample_audio_mapper) { // Create cache object to hold test frames CacheMemory cache; - int OFFSET = 0; - float AMPLITUDE = 0.75; - double ANGLE = 0.0; - int NUM_SAMPLES = 100; + const int OFFSET = 0; + const float AMPLITUDE = 0.75; + const int NUM_SAMPLES = 100; + double angle = 0.0; // Let's create some test frames for (int64_t frame_number = 1; frame_number <= 90; frame_number++) { // Create blank frame (with specific frame #, samples, and channels) // Sample count should be 44100 / 30 fps = 1470 samples per frame int sample_count = 1470; - std::shared_ptr f(new openshot::Frame(frame_number, sample_count, 2)); + auto f = std::make_shared(frame_number, sample_count, 2); // Create test samples with sin wave (predictable values) float *audio_buffer = new float[sample_count * 2]; for (int sample_number = 0; sample_number < sample_count; sample_number++) { // Calculate sin wave - float sample_value = float(AMPLITUDE * sin(ANGLE) + OFFSET); + float sample_value = float(AMPLITUDE * sin(angle) + OFFSET); audio_buffer[sample_number] = abs(sample_value); - ANGLE += (2 * M_PI) / NUM_SAMPLES; 
+ angle += (2 * M_PI) / NUM_SAMPLES; } // Add custom audio samples to Frame (bool replaceSamples, int destChannel, int destStartSample, const float* source, @@ -260,17 +256,19 @@ TEST(resample_audio_mapper) { // Add test frame to dummy reader cache.Add(f); + + delete[] audio_buffer; } // Create a default fraction (should be 1/1) - openshot::DummyReader r(openshot::Fraction(30, 1), 1920, 1080, 44100, 2, 30.0, &cache); + openshot::DummyReader r(openshot::Fraction(30, 1), 1, 1, 44100, 2, 30.0, &cache); r.Open(); // Open the reader // Sample rates - vector arr = { 44100, 16000 }; + std::vector arr = { 44100, 16000 }; for (auto& rate : arr) { // Reset SIN wave - ANGLE = 0.0; + angle = 0.0; // Map to 24 fps, which should create a variable # of samples per frame FrameMapper map(&r, Fraction(24,1), PULLDOWN_NONE, rate, 2, LAYOUT_STEREO); @@ -287,21 +285,21 @@ TEST(resample_audio_mapper) { for (int sample_index = 0; sample_index < sample_count; sample_index++) { // Calculate sin wave - float sample_value = abs(float(AMPLITUDE * sin(ANGLE) + OFFSET)); - ANGLE += (2 * M_PI) / (NUM_SAMPLES * resample_multiplier); + float sample_value = abs(float(AMPLITUDE * sin(angle) + OFFSET)); + angle += (2 * M_PI) / (NUM_SAMPLES * resample_multiplier); // Verify each mapped sample value is correct (after being redistributed by the FrameMapper) float resampled_value = map.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0); // TODO: 0.1 is much to broad to accurately test this, but without this, all the resampled values are too far away from expected - CHECK_CLOSE(sample_value, resampled_value, 0.1); + CHECK(resampled_value == Approx(sample_value).margin(0.1)); } // Increment sample value num_samples += map.GetFrame(frame_index)->GetAudioSamplesCount(); } // Verify samples per second is correct (i.e. 
44100) - CHECK_EQUAL(num_samples, map.info.sample_rate); + CHECK(map.info.sample_rate == num_samples); // Create Timeline (same specs as reader) Timeline t1(map.info.width, map.info.height, map.info.fps, rate, map.info.channels, map.info.channel_layout); @@ -331,22 +329,23 @@ TEST(resample_audio_mapper) { t1.Open(); // Reset SIN wave - ANGLE = 0.0; + angle = 0.0; for (int frame_index = 1; frame_index < 24; frame_index++) { - t1.GetFrame(frame_index); - for (int sample_index = 0; sample_index < t1.GetFrame(frame_index)->GetAudioSamplesCount(); sample_index++) { + auto f = t1.GetFrame(frame_index); + auto sample_count = f->GetAudioSamplesCount(); + for (int i = 0; i < sample_count; i++) { // Calculate sin wave - float sample_value = abs(float(AMPLITUDE * sin(ANGLE) + OFFSET)); - ANGLE += (2 * M_PI) / (NUM_SAMPLES * resample_multiplier); + float sample_value = abs(float(AMPLITUDE * sin(angle) + OFFSET)); + angle += (2 * M_PI) / (NUM_SAMPLES * resample_multiplier); // Verify each mapped sample value is correct (after being redistributed by the FrameMapper) - float resampled_value = t1.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0); + float resampled_value = f->GetAudioSample(0, i, 1.0); // TODO: 0.1 is much to broad to accurately test this, but without this, all the resampled values are too far away from expected // Testing wave value X 2, since we have 2 overlapping clips - CHECK_CLOSE(sample_value * 2.0, resampled_value, 0.1); + CHECK(resampled_value == Approx(sample_value * 2.0).margin(0.1)); } } @@ -362,7 +361,7 @@ TEST(resample_audio_mapper) { r.Close(); } -TEST(redistribute_samples_per_frame) { +TEST_CASE( "redistribute_samples_per_frame", "[libopenshot][framemapper]" ) { // This test verifies that audio data is correctly aligned on // FrameMapper instances. We do this by creating 2 Clips based on the same parent reader // (i.e. same exact audio sample data). 
We use a Timeline to overlap these clips @@ -380,7 +379,7 @@ TEST(redistribute_samples_per_frame) { // Create blank frame (with specific frame #, samples, and channels) // Sample count should be 44100 / 30 fps = 1470 samples per frame int sample_count = 1470; - std::shared_ptr f(new openshot::Frame(frame_number, sample_count, 2)); + auto f = std::make_shared(frame_number, sample_count, 2); // Create test samples with incrementing value float *audio_buffer = new float[sample_count]; @@ -397,6 +396,8 @@ TEST(redistribute_samples_per_frame) { // Add test frame to dummy reader cache.Add(f); + + delete[] audio_buffer; } // Create a default fraction (should be 1/1) @@ -404,7 +405,7 @@ TEST(redistribute_samples_per_frame) { r.Open(); // Open the reader // Sample rates - vector arr = { 24, 30, 60 }; + std::vector arr = { 24, 30, 60 }; for (auto& fps : arr) { // Map to 24 fps, which should create a variable # of samples per frame FrameMapper map(&r, Fraction(fps,1), PULLDOWN_NONE, 44100, 2, LAYOUT_STEREO); @@ -414,16 +415,16 @@ TEST(redistribute_samples_per_frame) { // Loop through samples, and verify FrameMapper didn't mess up individual sample values sample_value = 0; for (int frame_index = 1; frame_index <= map.info.fps.ToInt(); frame_index++) { - for (int sample_index = 0; sample_index < map.GetFrame(frame_index)->GetAudioSamplesCount(); sample_index++) { + for (int i = 0; i < map.GetFrame(frame_index)->GetAudioSamplesCount(); i++) { // Verify each mapped sample value is correct (after being redistributed by the FrameMapper) - CHECK_EQUAL(sample_value + sample_index, map.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0)); + CHECK(map.GetFrame(frame_index)->GetAudioSample(0, i, 1.0) == sample_value + i); } // Increment sample value sample_value += map.GetFrame(frame_index)->GetAudioSamplesCount(); } // Verify samples per second is correct (i.e. 
44100) - CHECK_EQUAL(sample_value, map.info.sample_rate); + CHECK(map.info.sample_rate == sample_value); // Create Timeline (same specs as reader) Timeline t1(map.info.width, map.info.height, map.info.fps, 44100, map.info.channels, map.info.channel_layout); @@ -467,7 +468,7 @@ TEST(redistribute_samples_per_frame) { // Check if sample_value - previous_value == 2 // This should be true, because the DummyReader is added twice to the Timeline, and is overlapping // This should be an ever increasing linear curve, increasing by 2 each sample on the Timeline - CHECK_EQUAL(2, sample_diff); + CHECK(sample_diff == 2); // Set previous sample value previous_sample_value = t1.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0); @@ -485,7 +486,140 @@ TEST(redistribute_samples_per_frame) { r.Close(); } -TEST(Json) +TEST_CASE( "Distribute samples", "[libopenshot][framemapper]" ) { + // This test verifies that audio data can be redistributed correctly + // between common and uncommon frame rates + int sample_rate = 48000; + int channels = 2; + int num_seconds = 1; + + // Source frame rates (varies the # of samples per frame) + std::vector rates = { + openshot::Fraction(30,1), + openshot::Fraction(24,1) , + openshot::Fraction(119,4), + openshot::Fraction(30000,1001) + }; + + for (auto& frame_rate : rates) { + // Init sin wave variables + const int OFFSET = 0; + const float AMPLITUDE = 0.75; + const int NUM_SAMPLES = 100; + double angle = 0.0; + + // Create cache object to hold test frames + openshot::CacheMemory cache; + + // Let's create some test frames + for (int64_t frame_number = 1; frame_number <= (frame_rate.ToFloat() * num_seconds * 2); ++frame_number) { + // Create blank frame (with specific frame #, samples, and channels) + int sample_count = openshot::Frame::GetSamplesPerFrame(frame_number, frame_rate, sample_rate, channels); + auto f = std::make_shared(frame_number, sample_count, channels); + f->SampleRate(sample_rate); + + // Create test samples with sin wave 
(predictable values) + float *audio_buffer = new float[sample_count * 2]; + for (int sample_number = 0; sample_number < sample_count; sample_number++) { + // Calculate sin wave + float sample_value = float(AMPLITUDE * sin(angle) + OFFSET); + audio_buffer[sample_number] = abs(sample_value); + angle += (2 * M_PI) / NUM_SAMPLES; + } + + // Add custom audio samples to Frame (bool replaceSamples, int destChannel, int destStartSample, const float* source, + f->AddAudio(true, 0, 0, audio_buffer, sample_count, 1.0); // add channel 1 + f->AddAudio(true, 1, 0, audio_buffer, sample_count, 1.0); // add channel 2 + + // Add test frame to dummy reader + cache.Add(f); + + delete[] audio_buffer; + } + + openshot::DummyReader r(frame_rate, 1920, 1080, sample_rate, channels, 30.0, &cache); + r.Open(); + + // Target frame rates + std::vector mapped_rates = { + openshot::Fraction(30,1), + openshot::Fraction(24,1), + openshot::Fraction(119,4), + openshot::Fraction(30000,1001) + }; + for (auto &mapped_rate : mapped_rates) { + // Reset SIN wave + angle = 0.0; + + // Map to different fps + FrameMapper map(&r, mapped_rate, PULLDOWN_NONE, sample_rate, channels, LAYOUT_STEREO); + map.info.has_audio = true; + map.Open(); + + // Loop through samples, and verify FrameMapper didn't mess up individual sample values + int num_samples = 0; + for (int frame_index = 1; frame_index <= (map.info.fps.ToInt() * num_seconds); frame_index++) { + int sample_count = map.GetFrame(frame_index)->GetAudioSamplesCount(); + for (int sample_index = 0; sample_index < sample_count; sample_index++) { + + // Calculate sin wave + float predicted_value = abs(float(AMPLITUDE * sin(angle) + OFFSET)); + angle += (2 * M_PI) / NUM_SAMPLES; + + // Verify each mapped sample value is correct (after being redistributed by the FrameMapper) + float mapped_value = map.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0); + CHECK(predicted_value == Approx(mapped_value).margin(0.001)); + } + // Increment sample value + 
num_samples += map.GetFrame(frame_index)->GetAudioSamplesCount(); + } + + float clip_position = 3.77; + int starting_clip_frame = round(clip_position * map.info.fps.ToFloat()) + 1; + + // Create Timeline (same specs as reader) + Timeline t1(map.info.width, map.info.height, map.info.fps, map.info.sample_rate, map.info.channels, + map.info.channel_layout); + + Clip c1; + c1.Reader(&map); + c1.Layer(1); + c1.Position(clip_position); + c1.Start(0.0); + c1.End(10.0); + + // Add clips + t1.AddClip(&c1); + t1.Open(); + + // Reset SIN wave + angle = 0.0; + + for (int frame_index = starting_clip_frame; frame_index < (starting_clip_frame + (t1.info.fps.ToFloat() * num_seconds)); frame_index++) { + for (int sample_index = 0; sample_index < t1.GetFrame(frame_index)->GetAudioSamplesCount(); sample_index++) { + // Calculate sin wave + float predicted_value = abs(float(AMPLITUDE * sin(angle) + OFFSET)); + angle += (2 * M_PI) / NUM_SAMPLES; + + // Verify each mapped sample value is correct (after being redistributed by the FrameMapper) + float timeline_value = t1.GetFrame(frame_index)->GetAudioSample(0, sample_index, 1.0); + CHECK(predicted_value == Approx(timeline_value).margin(0.001)); + } + } + + // Close mapper + map.Close(); + t1.Close(); + } + + // Clean up reader + r.Close(); + cache.Clear(); + + } // for rates +} + +TEST_CASE( "Json", "[libopenshot][framemapper]" ) { DummyReader r(Fraction(30,1), 1280, 720, 48000, 2, 5.0); FrameMapper map(&r, Fraction(30, 1), PULLDOWN_NONE, 48000, 2, LAYOUT_STEREO); @@ -494,8 +628,6 @@ TEST(Json) const std::string map_config = map.Json(); map.SetJson(map_config); - CHECK_EQUAL(48000, map.info.sample_rate); - CHECK_EQUAL(30, map.info.fps.num); + CHECK(map.info.sample_rate == 48000); + CHECK(map.info.fps.num == 30); } - -} // SUITE diff --git a/tests/ImageWriter_Tests.cpp b/tests/ImageWriter.cpp similarity index 75% rename from tests/ImageWriter_Tests.cpp rename to tests/ImageWriter.cpp index c4afaee00..d50d73c5b 100644 --- 
a/tests/ImageWriter_Tests.cpp +++ b/tests/ImageWriter.cpp @@ -28,51 +28,46 @@ * along with OpenShot Library. If not, see . */ +#ifdef USE_IMAGEMAGICK + #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include -#ifdef USE_IMAGEMAGICK #include "ImageWriter.h" #include "Exceptions.h" #include "ImageReader.h" #include "FFmpegReader.h" #include "Frame.h" -using namespace std; using namespace openshot; -SUITE(ImageWriter) -{ - -TEST(Gif) +TEST_CASE( "Gif", "[libopenshot][imagewriter]" ) { // Reader --------------- // Bad path FFmpegReader bad_r("/tmp/bleeblorp.xls", false); - CHECK_THROW(bad_r.Open(), InvalidFile); + CHECK_THROWS_AS(bad_r.Open(), InvalidFile); // Good path - stringstream path; + std::stringstream path; path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4"; FFmpegReader r(path.str()); // Read-before-open error - CHECK_THROW(r.GetFrame(1), ReaderClosed); + CHECK_THROWS_AS(r.GetFrame(1), ReaderClosed); r.Open(); /* WRITER ---------------- */ ImageWriter w("output1.gif"); - CHECK_EQUAL(false, w.IsOpen()); + CHECK_FALSE(w.IsOpen()); // Check for exception on write-before-open - CHECK_THROW(w.WriteFrame(&r, 500, 504), WriterClosed); + CHECK_THROWS_AS(w.WriteFrame(&r, 500, 504), WriterClosed); // Set the image output settings (format, fps, width, height, quality, loops, combine) w.SetVideoOptions("GIF", r.info.fps, r.info.width, r.info.height, 70, 1, true); @@ -91,18 +86,18 @@ TEST(Gif) ImageReader r1("output1.gif[4]"); // Basic Reader state queries - CHECK_EQUAL("ImageReader", r1.Name()); + CHECK(r1.Name() == "ImageReader"); CacheBase* c = r1.GetCache(); - CHECK_EQUAL(true, c == nullptr); + CHECK(c == nullptr); - CHECK_EQUAL(false, r1.IsOpen()); + CHECK_FALSE(r1.IsOpen()); r1.Open(); - CHECK_EQUAL(true, r1.IsOpen()); + CHECK(r1.IsOpen() == true); // Verify various settings - CHECK_EQUAL(r.info.width, r1.info.width); - CHECK_EQUAL(r.info.height, r1.info.height); + 
CHECK(r1.info.width == r.info.width); + CHECK(r1.info.height == r.info.height); // Get a specific frame std::shared_ptr f = r1.GetFrame(8); @@ -112,11 +107,9 @@ TEST(Gif) int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) // Check image properties - CHECK_CLOSE(20, (int)pixels[pixel_index], 5); - CHECK_CLOSE(18, (int)pixels[pixel_index + 1], 5); - CHECK_CLOSE(11, (int)pixels[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)pixels[pixel_index + 3], 5); + CHECK((int)pixels[pixel_index] == Approx(20).margin(5)); + CHECK((int)pixels[pixel_index + 1] == Approx(18).margin(5)); + CHECK((int)pixels[pixel_index + 2] == Approx(11).margin(5)); + CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5)); } - -} // SUITE -#endif +#endif // USE_IMAGEMAGICK diff --git a/tests/KeyFrame.cpp b/tests/KeyFrame.cpp new file mode 100644 index 000000000..f3ebdb745 --- /dev/null +++ b/tests/KeyFrame.cpp @@ -0,0 +1,706 @@ +/** + * @file + * @brief Unit tests for openshot::Keyframe + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. 
If not, see . + */ + +#include + +#include +#include + +#include "KeyFrame.h" +#include "TrackedObjectBBox.h" +#include "Exceptions.h" +#include "Coordinate.h" +#include "Fraction.h" +#include "Clip.h" +#include "Timeline.h" +#include "effects/Tracker.h" +#include "Point.h" + +using namespace openshot; + +TEST_CASE( "GetPoint_With_No_Points", "[libopenshot][keyframe]" ) +{ + // Create an empty keyframe + Keyframe k1; + + CHECK_THROWS_AS(k1.GetPoint(0), OutOfBoundsPoint); +} + +TEST_CASE( "GetPoint_With_1_Points", "[libopenshot][keyframe]" ) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,3)); + + CHECK_THROWS_AS(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK(k1.GetCount() == 1); + CHECK(k1.GetPoint(0).co.X == Approx(2.0f).margin(0.00001)); + CHECK(k1.GetPoint(0).co.Y == Approx(3.0f).margin(0.00001)); + CHECK_THROWS_AS(k1.GetPoint(1), OutOfBoundsPoint); +} + + +TEST_CASE( "AddPoint_With_1_Point", "[libopenshot][keyframe]" ) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,9)); + + CHECK(k1.GetPoint(0).co.X == Approx(2.0f).margin(0.00001)); + CHECK_THROWS_AS(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK_THROWS_AS(k1.GetPoint(1), OutOfBoundsPoint); +} + +TEST_CASE( "AddPoint_With_2_Points", "[libopenshot][keyframe]" ) +{ + // Create an empty keyframe + Keyframe k1; + k1.AddPoint(openshot::Point(2,9)); + k1.AddPoint(openshot::Point(5,20)); + + CHECK(k1.GetPoint(0).co.X == Approx(2.0f).margin(0.00001)); + CHECK(k1.GetPoint(1).co.X == Approx(5.0f).margin(0.00001)); + CHECK_THROWS_AS(k1.GetPoint(-1), OutOfBoundsPoint); + CHECK_THROWS_AS(k1.GetPoint(2), OutOfBoundsPoint); +} + +TEST_CASE( "GetValue_For_Bezier_Curve_2_Points", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + + // Spot check values from the curve + CHECK(kf.GetValue(-1) == 
Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(0) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(9) == Approx(1.12414f).margin(0.0001)); + CHECK(kf.GetValue(20) == Approx(1.86370f).margin(0.0001)); + CHECK(kf.GetValue(40) == Approx(3.79733f).margin(0.0001)); + CHECK(kf.GetValue(50) == Approx(4.0f).margin(0.0001)); + // Check the expected number of values + CHECK(kf.GetLength() == 51); +} + +TEST_CASE( "GetValue_For_Bezier_Curve_5_Points_40_Percent_Handle", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(150, 0), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(200, 3), BEZIER)); + + // Spot check values from the curve + CHECK(1.0f == Approx(kf.GetValue(-1)).margin(0.0001)); + CHECK(kf.GetValue(0) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(27) == Approx(2.68197f).margin(0.0001)); + CHECK(kf.GetValue(77) == Approx(7.47719f).margin(0.0001)); + CHECK(kf.GetValue(127) == Approx(4.20468f).margin(0.0001)); + CHECK(kf.GetValue(177) == Approx(1.73860f).margin(0.0001)); + CHECK(kf.GetValue(200) == Approx(3.0f).margin(0.0001)); + // Check the expected number of values + CHECK(kf.GetLength() == 201); +} + +TEST_CASE( "GetValue_For_Bezier_Curve_5_Points_25_Percent_Handle", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 4), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(150, 0), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(200, 3), BEZIER)); + + // Spot check values from the curve 
+ CHECK(kf.GetValue(-1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(0) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(27) == Approx(2.68197f).margin(0.0001)); + CHECK(kf.GetValue(77) == Approx(7.47719f).margin(0.0001)); + CHECK(kf.GetValue(127) == Approx(4.20468f).margin(0.0001)); + CHECK(kf.GetValue(177) == Approx(1.73860f).margin(0.0001)); + CHECK(kf.GetValue(200) == Approx(3.0f).margin(0.0001)); + // Check the expected number of values + CHECK(kf.GetLength() == 201); +} + +TEST_CASE( "GetValue_For_Linear_Curve_3_Points", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), LINEAR)); + + // Spot check values from the curve + CHECK(kf.GetValue(-1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(0) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(9) == Approx(3.33333f).margin(0.0001)); + CHECK(kf.GetValue(20) == Approx(6.54167f).margin(0.0001)); + CHECK(kf.GetValue(40) == Approx(4.4f).margin(0.0001)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.0001)); + // Check the expected number of values + CHECK(kf.GetLength() == 51); +} + +TEST_CASE( "GetValue_For_Constant_Curve_3_Points", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), CONSTANT)); + + // Spot check values from the curve + CHECK(kf.GetValue(-1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(0) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(24) == Approx(1.0f).margin(0.0001)); + CHECK(kf.GetValue(25) 
== Approx(8.0f).margin(0.0001)); + CHECK(kf.GetValue(40) == Approx(8.0f).margin(0.0001)); + CHECK(kf.GetValue(49) == Approx(8.0f).margin(0.0001)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.0001)); + // Check the expected number of values + CHECK(kf.GetLength() == 51); +} + +TEST_CASE( "Check_Direction_and_Repeat_Fractions", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 500); + kf.AddPoint(400, 100); + kf.AddPoint(500, 500); + + // Spot check values from the curve + CHECK(kf.GetInt(1) == 500); + CHECK_FALSE(kf.IsIncreasing(1)); + CHECK(kf.GetRepeatFraction(1).num == 1); + CHECK(kf.GetRepeatFraction(1).den == 13); + CHECK(kf.GetDelta(1) == 500); + + CHECK(kf.GetInt(24) == 498); + CHECK_FALSE(kf.IsIncreasing(24)); + CHECK(kf.GetRepeatFraction(24).num == 3); + CHECK(kf.GetRepeatFraction(24).den == 6); + CHECK(kf.GetDelta(24) == 0); + + CHECK(kf.GetLong(390) == 100); + CHECK(kf.IsIncreasing(390) == true); + CHECK(kf.GetRepeatFraction(390).num == 3); + CHECK(kf.GetRepeatFraction(390).den == 16); + CHECK(kf.GetDelta(390) == 0); + + CHECK(kf.GetLong(391) == 100); + CHECK(kf.IsIncreasing(391) == true); + CHECK(kf.GetRepeatFraction(391).num == 4); + CHECK(kf.GetRepeatFraction(391).den == 16); + CHECK(kf.GetDelta(388) == -1); +} + + +TEST_CASE( "Get_Closest_Point", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1000, 1.0); + kf.AddPoint(2500, 0.0); + + // Spot check values from the curve (to the right) + CHECK(kf.GetClosestPoint(openshot::Point(900, 900)).co.X == 1000); + CHECK(kf.GetClosestPoint(openshot::Point(1, 1)).co.X == 1); + CHECK(kf.GetClosestPoint(openshot::Point(5, 5)).co.X == 1000); + CHECK(kf.GetClosestPoint(openshot::Point(1000, 1000)).co.X == 1000); + CHECK(kf.GetClosestPoint(openshot::Point(1001, 1001)).co.X == 2500); + CHECK(kf.GetClosestPoint(openshot::Point(2500, 2500)).co.X == 2500); + 
CHECK(kf.GetClosestPoint(openshot::Point(3000, 3000)).co.X == 2500); + + // Spot check values from the curve (to the left) + CHECK(kf.GetClosestPoint(openshot::Point(900, 900), true).co.X == 1); + CHECK(kf.GetClosestPoint(openshot::Point(1, 1), true).co.X == 1); + CHECK(kf.GetClosestPoint(openshot::Point(5, 5), true).co.X == 1); + CHECK(kf.GetClosestPoint(openshot::Point(1000, 1000), true).co.X == 1); + CHECK(kf.GetClosestPoint(openshot::Point(1001, 1001), true).co.X == 1000); + CHECK(kf.GetClosestPoint(openshot::Point(2500, 2500), true).co.X == 1000); + CHECK(kf.GetClosestPoint(openshot::Point(3000, 3000), true).co.X == 2500); +} + + +TEST_CASE( "Get_Previous_Point", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1000, 1.0); + kf.AddPoint(2500, 0.0); + + // Spot check values from the curve + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(900, 900))).co.X == 1); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1, 1))).co.X == 1); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(5, 5))).co.X == 1); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1000, 1000))).co.X == 1); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(1001, 1001))).co.X == 1000); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(2500, 2500))).co.X == 1000); + CHECK(kf.GetPreviousPoint(kf.GetClosestPoint(openshot::Point(3000, 3000))).co.X == 1000); + +} + +TEST_CASE( "Get_Max_Point", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve + Keyframe kf; + kf.AddPoint(1, 1.0); + + // Spot check values from the curve + CHECK(kf.GetMaxPoint().co.Y == 1.0); + + kf.AddPoint(2, 0.0); + + // Spot check values from the curve + CHECK(kf.GetMaxPoint().co.Y == 1.0); + + kf.AddPoint(3, 2.0); + + // Spot check values from the curve + CHECK(kf.GetMaxPoint().co.Y == 2.0); + + kf.AddPoint(4, 1.0); + + // Spot check values from the curve + 
CHECK(kf.GetMaxPoint().co.Y == 2.0); +} + +TEST_CASE( "Scale_Keyframe", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), BEZIER)); + kf.AddPoint(openshot::Point(Coordinate(50, 2), BEZIER)); + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.01)); + CHECK(kf.GetValue(24) == Approx(7.99f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(40) == Approx(3.85f).margin(0.01)); + CHECK(kf.GetValue(49) == Approx(2.01f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.01)); + + // Resize / Scale the keyframe + kf.ScalePoints(2.0); // 100% larger + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.01)); + CHECK(kf.GetValue(24) == Approx(4.08f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(4.36f).margin(0.01)); + CHECK(kf.GetValue(40) == Approx(7.53f).margin(0.01)); + CHECK(kf.GetValue(49) == Approx(7.99f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(90) == Approx(2.39f).margin(0.01)); + CHECK(kf.GetValue(100) == Approx(2.0f).margin(0.01)); + + // Resize / Scale the keyframe + kf.ScalePoints(0.5); // 50% smaller, which should match the original size + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.01)); + CHECK(kf.GetValue(24) == Approx(7.99f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(40) == Approx(3.85f).margin(0.01)); + CHECK(kf.GetValue(49) == Approx(2.01f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.01)); + +} + +TEST_CASE( "Flip_Keyframe", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(25, 8), LINEAR)); + 
kf.AddPoint(openshot::Point(Coordinate(50, 2), LINEAR)); + kf.AddPoint(openshot::Point(Coordinate(100, 10), LINEAR)); + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.01)); + CHECK(kf.GetValue(100) == Approx(10.0f).margin(0.01)); + + // Flip the points + kf.FlipPoints(); + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(10.0f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(2.0f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(100) == Approx(1.0f).margin(0.01)); + + // Flip the points again (back to the original) + kf.FlipPoints(); + + // Spot check values from the curve + CHECK(kf.GetValue(1) == Approx(1.0f).margin(0.01)); + CHECK(kf.GetValue(25) == Approx(8.0f).margin(0.01)); + CHECK(kf.GetValue(50) == Approx(2.0f).margin(0.01)); + CHECK(kf.GetValue(100) == Approx(10.0f).margin(0.01)); +} + +TEST_CASE( "Remove_Duplicate_Point", "[libopenshot][keyframe]" ) +{ + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 0.0); + kf.AddPoint(1, 1.0); + kf.AddPoint(1, 2.0); + + // Spot check values from the curve + CHECK(kf.GetLength() == 1); + CHECK(kf.GetPoint(0).co.Y == Approx(2.0).margin(0.01)); +} + +TEST_CASE( "Large_Number_Values", "[libopenshot][keyframe]" ) +{ + // Large value + int64_t const large_value = 30 * 60 * 90; + + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 1.0); + kf.AddPoint(large_value, 100.0); // 90 minutes long + + // Spot check values from the curve + CHECK(kf.GetLength() == large_value + 1); + CHECK(kf.GetPoint(0).co.Y == Approx(1.0).margin(0.01)); + CHECK(kf.GetPoint(1).co.Y == Approx(100.0).margin(0.01)); +} + +TEST_CASE( "Remove_Point", "[libopenshot][keyframe]" ) +{ + Keyframe kf; + kf.AddPoint(openshot::Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(openshot::Point(Coordinate(3, 
100), CONSTANT)); + CHECK(kf.GetInt(2) == 1); + kf.AddPoint(openshot::Point(Coordinate(2, 50), CONSTANT)); + CHECK(kf.GetInt(2) == 50); + kf.RemovePoint(1); // This is the index of point with X == 2 + CHECK(kf.GetInt(2) == 1); + CHECK_THROWS_AS(kf.RemovePoint(100), OutOfBoundsPoint); +} + +TEST_CASE( "Constant_Interpolation_First_Segment", "[libopenshot][keyframe]" ) +{ + Keyframe kf; + kf.AddPoint(Point(Coordinate(1, 1), CONSTANT)); + kf.AddPoint(Point(Coordinate(2, 50), CONSTANT)); + kf.AddPoint(Point(Coordinate(3, 100), CONSTANT)); + CHECK(kf.GetInt(0) == 1); + CHECK(kf.GetInt(1) == 1); + CHECK(kf.GetInt(2) == 50); + CHECK(kf.GetInt(3) == 100); + CHECK(kf.GetInt(4) == 100); +} + +TEST_CASE( "isIncreasing", "[libopenshot][keyframe]" ) +{ + // Which cases need to be tested to keep same behaviour as + // previously? + // + // - "invalid point" => true + // - point where all next values are equal => false + // - point where first non-eq next value is smaller => false + // - point where first non-eq next value is larger => true + Keyframe kf; + kf.AddPoint(1, 1, LINEAR); // testing with linear + kf.AddPoint(3, 5, BEZIER); // testing with bezier + kf.AddPoint(6, 10, CONSTANT); // first non-eq is smaller + kf.AddPoint(8, 8, CONSTANT); // first non-eq is larger + kf.AddPoint(10, 10, CONSTANT); // all next values are equal + kf.AddPoint(15, 10, CONSTANT); + + // "invalid points" + CHECK(kf.IsIncreasing(0) == true); + CHECK(kf.IsIncreasing(15) == true); + // all next equal + CHECK_FALSE(kf.IsIncreasing(12)); + // first non-eq is larger + CHECK(kf.IsIncreasing(8) == true); + // first non-eq is smaller + CHECK_FALSE(kf.IsIncreasing(6)); + // bezier and linear + CHECK(kf.IsIncreasing(4) == true); + CHECK(kf.IsIncreasing(2) == true); +} + +TEST_CASE( "GetLength", "[libopenshot][keyframe]" ) +{ + Keyframe f; + CHECK(f.GetLength() == 0); + f.AddPoint(1, 1); + CHECK(f.GetLength() == 1); + f.AddPoint(2, 1); + CHECK(f.GetLength() == 3); + f.AddPoint(200, 1); + CHECK(f.GetLength() 
== 201); + + Keyframe g; + g.AddPoint(200, 1); + CHECK(g.GetLength() == 1); + g.AddPoint(1,1); + CHECK(g.GetLength() == 201); +} + +TEST_CASE( "Use_Interpolation_of_Segment_End_Point", "[libopenshot][keyframe]" ) +{ + Keyframe f; + f.AddPoint(1,0, CONSTANT); + f.AddPoint(100,155, BEZIER); + CHECK(f.GetValue(50) == Approx(75.9).margin(0.1)); +} + +TEST_CASE( "Handle_Large_Segment", "[libopenshot][keyframe]" ) +{ + Keyframe kf; + kf.AddPoint(1, 0, CONSTANT); + kf.AddPoint(1000000, 1, LINEAR); + + CHECK(kf.GetValue(500000) == Approx(0.5).margin(0.01)); + CHECK(kf.IsIncreasing(10) == true); + + Fraction fr = kf.GetRepeatFraction(250000); + CHECK((double)fr.num / fr.den == Approx(0.5).margin(0.01)); +} + +TEST_CASE( "Point_Vector_Constructor", "[libopenshot][keyframe]" ) +{ + std::vector points{Point(1, 10), Point(5, 20), Point(10, 30)}; + Keyframe k1(points); + + CHECK(k1.GetLength() == 11); + CHECK(k1.GetValue(10) == Approx(30.0f).margin(0.0001)); +} + +TEST_CASE( "TrackedObjectBBox init", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + // XXX: This really needs to perform some sort of _test_ here, + // like confirming some default values in the new object. 
+} + +TEST_CASE( "TrackedObjectBBox AddBox and RemoveBox", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + CHECK(kfb.Contains(1) == true); + CHECK(kfb.GetLength() == 1); + + kfb.RemoveBox(1); + + CHECK_FALSE(kfb.Contains(1)); + CHECK(kfb.GetLength() == 0); +} + +TEST_CASE( "TrackedObjectBBox GetVal", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(1); + + CHECK(val.cx == 10.0); + CHECK(val.cy == 10.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + CHECK(val.angle == 0.0); +} + +TEST_CASE( "TrackedObjectBBox GetVal interpolation", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(11, 20.0, 20.0, 100.0, 100.0, 0.0); + kfb.AddBox(21, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(31, 40.0, 40.0, 100.0, 100.0, 0.0); + + BBox val = kfb.GetBox(5); + + CHECK(val.cx == 14.0); + CHECK(val.cy == 14.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + + val = kfb.GetBox(15); + + CHECK(val.cx == 24.0); + CHECK(val.cy == 24.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + + val = kfb.GetBox(25); + + CHECK(val.cx == 34.0); + CHECK(val.cy == 34.0); + CHECK(val.width == 100.0); + CHECK(val.height == 100.0); + +} + + +TEST_CASE( "TrackedObjectBBox SetJson", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 100.0, 100.0, 0.0); + kfb.AddBox(10, 20.0, 20.0, 100.0, 100.0, 0.0); + kfb.AddBox(20, 30.0, 30.0, 100.0, 100.0, 0.0); + kfb.AddBox(30, 40.0, 40.0, 100.0, 100.0, 0.0); + + kfb.scale_x.AddPoint(1, 2.0); + kfb.scale_x.AddPoint(10, 3.0); + + kfb.SetBaseFPS(Fraction(24.0, 1.0)); + + auto dataJSON = kfb.Json(); + TrackedObjectBBox fromJSON_kfb; + fromJSON_kfb.SetJson(dataJSON); + + CHECK(kfb.GetBaseFPS().num == fromJSON_kfb.GetBaseFPS().num); + + double time_kfb = kfb.FrameNToTime(1, 1.0); + double 
time_fromJSON_kfb = fromJSON_kfb.FrameNToTime(1, 1.0); + CHECK(time_kfb == time_fromJSON_kfb); + + BBox kfb_bbox = kfb.BoxVec[time_kfb]; + BBox fromJSON_bbox = fromJSON_kfb.BoxVec[time_fromJSON_kfb]; + + CHECK(kfb_bbox.cx == fromJSON_bbox.cx); + CHECK(kfb_bbox.cy == fromJSON_bbox.cy); + CHECK(kfb_bbox.width == fromJSON_bbox.width); + CHECK(kfb_bbox.height == fromJSON_bbox.height); + CHECK(kfb_bbox.angle == fromJSON_bbox.angle); +} + +TEST_CASE( "TrackedObjectBBox scaling", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox kfb; + + kfb.AddBox(1, 10.0, 10.0, 10.0, 10.0, 0.0); + kfb.scale_x.AddPoint(1.0, 2.0); + kfb.scale_y.AddPoint(1.0, 3.0); + + BBox bbox = kfb.GetBox(1); + + CHECK(bbox.width == 20.0); + CHECK(bbox.height == 30.0); +} + +TEST_CASE( "AttachToObject", "[libopenshot][keyframe]" ) +{ + std::stringstream path1, path2; + path1 << TEST_MEDIA_PATH << "test.avi"; + path2 << TEST_MEDIA_PATH << "run.mp4"; + + // Create Timeline + Timeline t(1280, 720, Fraction(25,1), 44100, 2, ChannelLayout::LAYOUT_STEREO); + + // Create Clip and add it to the Timeline + Clip clip(new FFmpegReader(path1.str())); + clip.Id("AAAA1234"); + + // Create a child clip and add it to the Timeline + Clip childClip(new FFmpegReader(path2.str())); + childClip.Id("CHILD123"); + + // Add clips to timeline + t.AddClip(&childClip); + t.AddClip(&clip); + + // Create tracker and add it to clip + Tracker tracker; + clip.AddEffect(&tracker); + + // Save a pointer to trackedData + std::shared_ptr trackedData = tracker.trackedData; + + // Change trackedData scale + trackedData->scale_x.AddPoint(1, 2.0); + CHECK(trackedData->scale_x.GetValue(1) == 2.0); + + // Tracked Data JSON + auto trackedDataJson = trackedData->JsonValue(); + + // Get and cast the trackedObject + auto trackedObject_base = t.GetTrackedObject("None"); + std::shared_ptr trackedObject = std::static_pointer_cast(trackedObject_base); + CHECK(trackedObject == trackedData); + + // Set trackedObject Json Value + 
trackedObject->SetJsonValue(trackedDataJson); + + // Attach childClip to tracked object + std::string tracked_id = trackedData->Id(); + childClip.Open(); + childClip.AttachToObject(tracked_id); + + std::shared_ptr trackedTest = std::static_pointer_cast(childClip.GetAttachedObject()); + + CHECK(trackedData->scale_x.GetValue(1) == trackedTest->scale_x.GetValue(1)); + + auto frameTest = childClip.GetFrame(1); + childClip.Close(); + // XXX: Here, too, there needs to be some sort of actual _testing_ of the results +} + +TEST_CASE( "GetBoxValues", "[libopenshot][keyframe]" ) +{ + TrackedObjectBBox trackedDataObject; + trackedDataObject.AddBox(1, 10.0, 10.0, 20.0, 20.0, 30.0); + + std::shared_ptr trackedData = std::make_shared(trackedDataObject); + + auto boxValues = trackedData->GetBoxValues(1); + + CHECK(boxValues["cx"] == 10.0); + CHECK(boxValues["cy"] == 10.0); + CHECK(boxValues["w"] == 20.0); + CHECK(boxValues["h"] == 20.0); + CHECK(boxValues["ang"] == 30.0); +} diff --git a/tests/Point_Tests.cpp b/tests/Point.cpp similarity index 54% rename from tests/Point_Tests.cpp rename to tests/Point.cpp index 39012845f..45f493e07 100644 --- a/tests/Point_Tests.cpp +++ b/tests/Point.cpp @@ -28,98 +28,96 @@ * along with OpenShot Library. If not, see . 
*/ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Point.h" #include "Enums.h" +#include "Exceptions.h" #include "Coordinate.h" #include "Json.h" -SUITE(POINT) { - -TEST(Default_Constructor) +TEST_CASE( "Default_Constructor", "[libopenshot][point]" ) { openshot::Point p; // Default values - CHECK_EQUAL(1, p.co.X); - CHECK_EQUAL(0, p.co.Y); - CHECK_EQUAL(0.5, p.handle_left.X); - CHECK_EQUAL(1.0, p.handle_left.Y); - CHECK_EQUAL(0.5, p.handle_right.X); - CHECK_EQUAL(0.0, p.handle_right.Y); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p.interpolation); - CHECK_EQUAL(openshot::HandleType::AUTO, p.handle_type); + CHECK(p.co.X == 1); + CHECK(p.co.Y == 0); + CHECK(p.handle_left.X == 0.5); + CHECK(p.handle_left.Y == 1.0); + CHECK(p.handle_right.X == 0.5); + CHECK(p.handle_right.Y == 0.0); + CHECK(p.interpolation == openshot::InterpolationType::BEZIER); + CHECK(p.handle_type == openshot::HandleType::AUTO); } -TEST(XY_Constructor) +TEST_CASE( "XY_Constructor", "[libopenshot][point]" ) { // Create a point with X and Y values openshot::Point p1(2,9); - CHECK_EQUAL(2, p1.co.X); - CHECK_EQUAL(9, p1.co.Y); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p1.interpolation); + CHECK(p1.co.X == 2); + CHECK(p1.co.Y == 9); + CHECK(p1.interpolation == openshot::InterpolationType::BEZIER); } -TEST(Pair_Constructor) +TEST_CASE( "Pair_Constructor", "[libopenshot][point]" ) { // Create a point from a std::pair std::pair coordinates(22, 5); openshot::Point p1(coordinates); - CHECK_CLOSE(22.0f, p1.co.X, 0.00001); - CHECK_CLOSE(5.0f, p1.co.Y, 0.00001); + CHECK(p1.co.X == Approx(22.0f).margin(0.00001)); + CHECK(p1.co.Y == Approx(5.0f).margin(0.00001)); } -TEST(Constructor_With_Coordinate) +TEST_CASE( "Constructor_With_Coordinate", "[libopenshot][point]" ) { // Create a point with a coordinate openshot::Coordinate c1(3,7); openshot::Point p1(c1); - CHECK_CLOSE(3.0f, p1.co.X, 0.00001); - CHECK_CLOSE(7.0f, 
p1.co.Y, 0.00001); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p1.interpolation); + CHECK(p1.co.X == Approx(3.0f).margin(0.00001)); + CHECK(p1.co.Y == Approx(7.0f).margin(0.00001)); + CHECK(p1.interpolation == openshot::InterpolationType::BEZIER); } -TEST(Constructor_With_Coordinate_And_LINEAR_Interpolation) +TEST_CASE( "Constructor_With_Coordinate_And_LINEAR_Interpolation", "[libopenshot][point]" ) { // Create a point with a coordinate and interpolation openshot::Coordinate c1(3,9); auto interp = openshot::InterpolationType::LINEAR; openshot::Point p1(c1, interp); - CHECK_EQUAL(3, c1.X); - CHECK_EQUAL(9, c1.Y); - CHECK_EQUAL(openshot::InterpolationType::LINEAR, p1.interpolation); + CHECK(c1.X == 3); + CHECK(c1.Y == 9); + CHECK(p1.interpolation == openshot::InterpolationType::LINEAR); } -TEST(Constructor_With_Coordinate_And_BEZIER_Interpolation) +TEST_CASE( "Constructor_With_Coordinate_And_BEZIER_Interpolation", "[libopenshot][point]" ) { // Create a point with a coordinate and interpolation openshot::Coordinate c1(3,9); auto interp = openshot::InterpolationType::BEZIER; openshot::Point p1(c1, interp); - CHECK_EQUAL(3, p1.co.X); - CHECK_EQUAL(9, p1.co.Y); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p1.interpolation); + CHECK(p1.co.X == 3); + CHECK(p1.co.Y == 9); + CHECK(p1.interpolation == openshot::InterpolationType::BEZIER); } -TEST(Constructor_With_Coordinate_And_CONSTANT_Interpolation) +TEST_CASE( "Constructor_With_Coordinate_And_CONSTANT_Interpolation", "[libopenshot][point]" ) { // Create a point with a coordinate and interpolation openshot::Coordinate c1(2,8); auto interp = openshot::InterpolationType::CONSTANT; openshot::Point p1(c1, interp); - CHECK_EQUAL(2, p1.co.X); - CHECK_EQUAL(8, p1.co.Y); - CHECK_EQUAL(openshot::InterpolationType::CONSTANT, p1.interpolation); + CHECK(p1.co.X == 2); + CHECK(p1.co.Y == 8); + CHECK(p1.interpolation == openshot::InterpolationType::CONSTANT); } -TEST(Constructor_With_Coordinate_And_BEZIER_And_AUTO_Handle) 
+TEST_CASE( "Constructor_With_Coordinate_And_BEZIER_And_AUTO_Handle", "[libopenshot][point]" ) { // Create a point with a coordinate and interpolation openshot::Coordinate c1(3,9); @@ -127,13 +125,13 @@ TEST(Constructor_With_Coordinate_And_BEZIER_And_AUTO_Handle) openshot::InterpolationType::BEZIER, openshot::HandleType::AUTO); - CHECK_EQUAL(3, p1.co.X); - CHECK_EQUAL(9, p1.co.Y); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p1.interpolation); - CHECK_EQUAL(openshot::HandleType::AUTO, p1.handle_type); + CHECK(p1.co.X == 3); + CHECK(p1.co.Y == 9); + CHECK(p1.interpolation == openshot::InterpolationType::BEZIER); + CHECK(p1.handle_type == openshot::HandleType::AUTO); } -TEST(Constructor_With_Coordinate_And_BEZIER_And_MANUAL_Handle) +TEST_CASE( "Constructor_With_Coordinate_And_BEZIER_And_MANUAL_Handle", "[libopenshot][point]" ) { // Create a point with a coordinate and interpolation openshot::Coordinate c1(3,9); @@ -141,26 +139,31 @@ TEST(Constructor_With_Coordinate_And_BEZIER_And_MANUAL_Handle) openshot::InterpolationType::BEZIER, openshot::HandleType::MANUAL); - CHECK_EQUAL(3, p1.co.X); - CHECK_EQUAL(9, p1.co.Y); - CHECK_EQUAL(openshot::InterpolationType::BEZIER, p1.interpolation); - CHECK_EQUAL(openshot::HandleType::MANUAL, p1.handle_type); + CHECK(p1.co.X == 3); + CHECK(p1.co.Y == 9); + CHECK(p1.interpolation == openshot::InterpolationType::BEZIER); + CHECK(p1.handle_type == openshot::HandleType::MANUAL); } -TEST(Json) +TEST_CASE( "Json", "[libopenshot][point]" ) { openshot::Point p1; openshot::Point p2(1, 0); auto json1 = p1.Json(); auto json2 = p2.JsonValue(); auto json_string2 = json2.toStyledString(); - CHECK_EQUAL(json1, json_string2); + CHECK(json_string2 == json1); } -TEST(SetJson) +TEST_CASE( "SetJson", "[libopenshot][point]" ) { openshot::Point p1; std::stringstream json_stream; + + // A string that's not JSON should cause an exception + CHECK_THROWS_AS(p1.SetJson("}{"), openshot::InvalidJSON); + + // Build a valid JSON string for Point settings 
json_stream << R"json( { "co": { "X": 1.0, "Y": 0.0 }, @@ -174,13 +177,12 @@ TEST(SetJson) json_stream << R"json( } )json"; + p1.SetJson(json_stream.str()); - CHECK_EQUAL(2.0, p1.handle_left.X); - CHECK_EQUAL(3.0, p1.handle_left.Y); - CHECK_EQUAL(4.0, p1.handle_right.X); - CHECK_EQUAL(-2.0, p1.handle_right.Y); - CHECK_EQUAL(openshot::HandleType::MANUAL, p1.handle_type); - CHECK_EQUAL(openshot::InterpolationType::CONSTANT, p1.interpolation); + CHECK(p1.handle_left.X == 2.0); + CHECK(p1.handle_left.Y == 3.0); + CHECK(p1.handle_right.X == 4.0); + CHECK(p1.handle_right.Y == -2.0); + CHECK(p1.handle_type == openshot::HandleType::MANUAL); + CHECK(p1.interpolation == openshot::InterpolationType::CONSTANT); } - -} // SUITE diff --git a/tests/QtImageReader_Tests.cpp b/tests/QtImageReader.cpp similarity index 75% rename from tests/QtImageReader_Tests.cpp rename to tests/QtImageReader.cpp index 2fd78d5e4..6bfeebe4e 100644 --- a/tests/QtImageReader_Tests.cpp +++ b/tests/QtImageReader.cpp @@ -28,39 +28,38 @@ * along with OpenShot Library. If not, see . 
*/ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 -#include "QGuiApplication" -#include "OpenShot.h" +#include -using namespace std; -using namespace openshot; +#include -SUITE(QtImageReader) -{ +#include "QtImageReader.h" +#include "Frame.h" +#include "Clip.h" +#include "Timeline.h" + +using namespace openshot; -TEST(Default_Constructor) +TEST_CASE( "Default_Constructor", "[libopenshot][qtimagereader]" ) { // Check invalid path - CHECK_THROW(QtImageReader(""), InvalidFile); + CHECK_THROWS_AS(QtImageReader(""), InvalidFile); } -TEST(GetFrame_Before_Opening) +TEST_CASE( "GetFrame_Before_Opening", "[libopenshot][qtimagereader]" ) { // Create a reader - stringstream path; + std::stringstream path; path << TEST_MEDIA_PATH << "front.png"; QtImageReader r(path.str()); // Check invalid path - CHECK_THROW(r.GetFrame(1), ReaderClosed); + CHECK_THROWS_AS(r.GetFrame(1), ReaderClosed); } -TEST(Check_SVG_Loading) +TEST_CASE( "Check_SVG_Loading", "[libopenshot][qtimagereader]" ) { // Create a reader - stringstream path; + std::stringstream path; path << TEST_MEDIA_PATH << "1F0CF.svg"; QtImageReader r(path.str()); r.Open(); @@ -68,8 +67,8 @@ TEST(Check_SVG_Loading) // Get frame, with no Timeline or Clip // Default SVG scaling sizes things to 1920x1080 std::shared_ptr f = r.GetFrame(1); - CHECK_EQUAL(1080, f->GetImage()->width()); - CHECK_EQUAL(1080, f->GetImage()->height()); + CHECK(f->GetImage()->width() == 1080); + CHECK(f->GetImage()->height() == 1080); Fraction fps(30000,1000); Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO); @@ -87,21 +86,18 @@ TEST(Check_SVG_Loading) // Should scale to 480 clip1.Reader()->Open(); f = clip1.Reader()->GetFrame(2); - CHECK_EQUAL(480, f->GetImage()->width()); - CHECK_EQUAL(480, f->GetImage()->height()); + CHECK(f->GetImage()->width() == 480); + CHECK(f->GetImage()->height() == 480); // Add scale_x and scale_y. Should scale the square SVG // by the largest scale keyframe (i.e. 
4) clip1.scale_x.AddPoint(1.0, 2.0, openshot::LINEAR); clip1.scale_y.AddPoint(1.0, 2.0, openshot::LINEAR); f = clip1.Reader()->GetFrame(3); - CHECK_EQUAL(480 * 2, f->GetImage()->width()); - CHECK_EQUAL(480 * 2, f->GetImage()->height()); + CHECK(f->GetImage()->width() == 480 * 2); + CHECK(f->GetImage()->height() == 480 * 2); // Close reader t1.Close(); r.Close(); } - -} // SUITE(QtImageReader) - diff --git a/tests/ReaderBase_Tests.cpp b/tests/ReaderBase.cpp similarity index 74% rename from tests/ReaderBase_Tests.cpp rename to tests/ReaderBase.cpp index dec61efd9..94880e004 100644 --- a/tests/ReaderBase_Tests.cpp +++ b/tests/ReaderBase.cpp @@ -29,65 +29,67 @@ */ #include +#include + +#include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 #include "ReaderBase.h" #include "CacheBase.h" #include "Frame.h" #include "Json.h" -using namespace std; using namespace openshot; // Since it is not possible to instantiate an abstract class, this test creates // a new derived class, in order to test the base class file info struct. 
-TEST(ReaderBase_Derived_Class) +TEST_CASE( "ReaderBase_Derived_Class", "[libopenshot][readerbase]" ) { // Create a new derived class from type ReaderBase class TestReader : public ReaderBase { public: TestReader() { }; - CacheBase* GetCache() { return NULL; }; + CacheBase* GetCache() { return nullptr; }; std::shared_ptr GetFrame(int64_t number) { std::shared_ptr f(new Frame()); return f; } void Close() { }; void Open() { }; - string Json() const { return ""; }; - void SetJson(string value) { }; + std::string Json() const { return ""; }; + void SetJson(std::string value) { }; Json::Value JsonValue() const { return Json::Value("{}"); }; void SetJsonValue(Json::Value root) { }; bool IsOpen() { return true; }; - string Name() { return "TestReader"; }; + std::string Name() { return "TestReader"; }; }; // Create an instance of the derived class TestReader t1; // Validate the new class - CHECK_EQUAL("TestReader", t1.Name()); + CHECK(t1.Name() == "TestReader"); t1.Close(); t1.Open(); - CHECK_EQUAL(true, t1.IsOpen()); + CHECK(t1.IsOpen() == true); - CHECK_EQUAL(true, t1.GetCache() == NULL); + CHECK(t1.GetCache() == nullptr); t1.SetJson("{ }"); t1.SetJsonValue(Json::Value("{}")); - CHECK_EQUAL("", t1.Json()); + CHECK(t1.Json() == ""); auto json = t1.JsonValue(); - CHECK_EQUAL(json, Json::Value("{}")); + CHECK(Json::Value("{}") == json); auto f = t1.GetFrame(1); + REQUIRE(f != nullptr); + CHECK(f->number == 1); + // Check some of the default values of the FileInfo struct on the base class - CHECK_EQUAL(false, t1.info.has_audio); - CHECK_EQUAL(false, t1.info.has_audio); - CHECK_CLOSE(0.0f, t1.info.duration, 0.00001); - CHECK_EQUAL(0, t1.info.height); - CHECK_EQUAL(0, t1.info.width); - CHECK_EQUAL(1, t1.info.fps.num); - CHECK_EQUAL(1, t1.info.fps.den); + CHECK_FALSE(t1.info.has_audio); + CHECK_FALSE(t1.info.has_audio); + CHECK(t1.info.duration == Approx(0.0f).margin(0.00001)); + CHECK(t1.info.height == 0); + CHECK(t1.info.width == 0); + CHECK(t1.info.fps.num == 1); + 
CHECK(t1.info.fps.den == 1); } diff --git a/tests/Settings_Tests.cpp b/tests/Settings.cpp similarity index 75% rename from tests/Settings_Tests.cpp rename to tests/Settings.cpp index ce5dc2824..e974ffd43 100644 --- a/tests/Settings_Tests.cpp +++ b/tests/Settings.cpp @@ -28,33 +28,31 @@ * along with OpenShot Library. If not, see . */ -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Settings.h" -using namespace std; using namespace openshot; -TEST(Settings_Default_Constructor) +TEST_CASE( "Default_Constructor", "[libopenshot][settings]" ) { // Create an empty color Settings *s = Settings::Instance(); - CHECK_EQUAL(12, s->OMP_THREADS); - CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); + CHECK(s->OMP_THREADS == 12); + CHECK_FALSE(s->HIGH_QUALITY_SCALING); } -TEST(Settings_Change_Settings) +TEST_CASE( "Change_Settings", "[libopenshot][settings]" ) { // Create an empty color Settings *s = Settings::Instance(); s->OMP_THREADS = 8; s->HIGH_QUALITY_SCALING = true; - CHECK_EQUAL(8, s->OMP_THREADS); - CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); + CHECK(s->OMP_THREADS == 8); + CHECK(s->HIGH_QUALITY_SCALING == true); - CHECK_EQUAL(8, Settings::Instance()->OMP_THREADS); - CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); + CHECK(Settings::Instance()->OMP_THREADS == 8); + CHECK(Settings::Instance()->HIGH_QUALITY_SCALING == true); } diff --git a/tests/Timeline_Tests.cpp b/tests/Timeline.cpp similarity index 56% rename from tests/Timeline_Tests.cpp rename to tests/Timeline.cpp index 2a9cd632e..255727c62 100644 --- a/tests/Timeline_Tests.cpp +++ b/tests/Timeline.cpp @@ -28,13 +28,13 @@ * along with OpenShot Library. If not, see . 
*/ +#include #include #include #include -#include "UnitTest++.h" -// Prevent name clashes with juce::UnitTest -#define DONT_SET_USING_JUCE_NAMESPACE 1 +#include + #include "Timeline.h" #include "Clip.h" #include "Frame.h" @@ -42,75 +42,88 @@ #include "effects/Blur.h" #include "effects/Negate.h" -using namespace std; using namespace openshot; -SUITE(Timeline) -{ - -TEST(Constructor) +TEST_CASE( "constructor", "[libopenshot][timeline]" ) { - // Create a default fraction (should be 1/1) Fraction fps(30000,1000); Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO); // Check values - CHECK_EQUAL(640, t1.info.width); - CHECK_EQUAL(480, t1.info.height); + CHECK(t1.info.width == 640); + CHECK(t1.info.height == 480); - // Create a default fraction (should be 1/1) Timeline t2(300, 240, fps, 44100, 2, LAYOUT_STEREO); // Check values - CHECK_EQUAL(300, t2.info.width); - CHECK_EQUAL(240, t2.info.height); + CHECK(t2.info.width == 300); + CHECK(t2.info.height == 240); +} + +TEST_CASE("ReaderInfo constructor", "[libopenshot][timeline]") +{ + // Create a reader + std::stringstream path; + path << TEST_MEDIA_PATH << "test.mp4"; + Clip clip_video(path.str()); + clip_video.Open(); + const auto r1 = clip_video.Reader(); + + // Configure a Timeline with the same parameters + Timeline t1(r1->info); + + CHECK(r1->info.width == t1.info.width); + CHECK(r1->info.height == t1.info.height); + CHECK(r1->info.fps.num == t1.info.fps.num); + CHECK(r1->info.fps.den == t1.info.fps.den); + CHECK(r1->info.sample_rate == t1.info.sample_rate); + CHECK(r1->info.channels == t1.info.channels); + CHECK(r1->info.channel_layout == t1.info.channel_layout); } -TEST(Width_and_Height_Functions) +TEST_CASE( "width and height functions", "[libopenshot][timeline]" ) { - // Create a default fraction (should be 1/1) Fraction fps(30000,1000); Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO); // Check values - CHECK_EQUAL(640, t1.info.width); - CHECK_EQUAL(480, t1.info.height); + CHECK(t1.info.width == 640); + 
CHECK(t1.info.height == 480); // Set width t1.info.width = 600; // Check values - CHECK_EQUAL(600, t1.info.width); - CHECK_EQUAL(480, t1.info.height); + CHECK(t1.info.width == 600); + CHECK(t1.info.height == 480); // Set height t1.info.height = 400; // Check values - CHECK_EQUAL(600, t1.info.width); - CHECK_EQUAL(400, t1.info.height); + CHECK(t1.info.width == 600); + CHECK(t1.info.height == 400); } -TEST(Framerate) +TEST_CASE( "Framerate", "[libopenshot][timeline]" ) { - // Create a default fraction (should be 1/1) Fraction fps(24,1); Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO); // Check values - CHECK_CLOSE(24.0f, t1.info.fps.ToFloat(), 0.00001); + CHECK(t1.info.fps.ToFloat() == Approx(24.0f).margin(0.00001)); } -TEST(Check_Two_Track_Video) +TEST_CASE( "two-track video", "[libopenshot][timeline]" ) { // Create a reader - stringstream path; + std::stringstream path; path << TEST_MEDIA_PATH << "test.mp4"; Clip clip_video(path.str()); clip_video.Layer(0); clip_video.Position(0.0); - stringstream path_overlay; + std::stringstream path_overlay; path_overlay << TEST_MEDIA_PATH << "front3.png"; Clip clip_overlay(path_overlay.str()); clip_overlay.Layer(1); @@ -124,10 +137,8 @@ TEST(Check_Two_Track_Video) t.AddClip(&clip_video); t.AddClip(&clip_overlay); - // Open Timeline t.Open(); - // Get frame std::shared_ptr f = t.GetFrame(1); // Get the image data @@ -135,124 +146,99 @@ TEST(Check_Two_Track_Video) int pixel_index = 230 * 4; // pixel 230 (4 bytes per pixel) // Check image properties - CHECK_CLOSE(21, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(191, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(21).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(191).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == 
Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(2); // Check image properties - CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(176).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(186).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(3); // Check image properties - CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(23).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(190).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(24); // Check image properties - CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(106, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(186).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(106).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(0).margin(5)); + 
CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(5); // Check image properties - CHECK_CLOSE(23, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(190, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(23).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(190).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(25); // Check image properties - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(94, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(94).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(186).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 3] == Approx(255).margin(5)); - // Get frame f = t.GetFrame(4); // Check image properties - CHECK_CLOSE(176, (int)f->GetPixels(pixel_row)[pixel_index], 5); - CHECK_CLOSE(0, (int)f->GetPixels(pixel_row)[pixel_index + 1], 5); - CHECK_CLOSE(186, (int)f->GetPixels(pixel_row)[pixel_index + 2], 5); - CHECK_CLOSE(255, (int)f->GetPixels(pixel_row)[pixel_index + 3], 5); + CHECK((int)f->GetPixels(pixel_row)[pixel_index] == Approx(176).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 1] == Approx(0).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index + 2] == Approx(186).margin(5)); + CHECK((int)f->GetPixels(pixel_row)[pixel_index 
+ 3] == Approx(255).margin(5)); - // Close reader t.Close(); } -TEST(Clip_Order) +TEST_CASE( "Clip order", "[libopenshot][timeline]" ) { // Create a timeline Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); // Add some clips out of order - stringstream path_top; + std::stringstream path_top; path_top << TEST_MEDIA_PATH << "front3.png"; Clip clip_top(path_top.str()); clip_top.Layer(2); t.AddClip(&clip_top); - stringstream path_middle; + std::stringstream path_middle; path_middle << TEST_MEDIA_PATH << "front.png"; Clip clip_middle(path_middle.str()); clip_middle.Layer(0); t.AddClip(&clip_middle); - stringstream path_bottom; + std::stringstream path_bottom; path_bottom << TEST_MEDIA_PATH << "back.png"; Clip clip_bottom(path_bottom.str()); clip_bottom.Layer(1); t.AddClip(&clip_bottom); - // Open Timeline t.Open(); // Loop through Clips and check order (they should have been sorted into the correct order) // Bottom layer to top layer, then by position. - list::iterator clip_itr; - list clips = t.Clips(); - int counter = 0; - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) - { - // Get clip object from the iterator - Clip *clip = (*clip_itr); - - switch (counter) { - case 0: - CHECK_EQUAL(0, clip->Layer()); - break; - case 1: - CHECK_EQUAL(1, clip->Layer()); - break; - case 2: - CHECK_EQUAL(2, clip->Layer()); - break; - } - - // increment counter - counter++; + std::list clips = t.Clips(); + int n = 0; + for (auto clip : clips) { + CHECK(clip->Layer() == n); + ++n; } // Add another clip - stringstream path_middle1; + std::stringstream path_middle1; path_middle1 << TEST_MEDIA_PATH << "interlaced.png"; Clip clip_middle1(path_middle1.str()); clip_middle1.Layer(1); @@ -260,40 +246,33 @@ TEST(Clip_Order) t.AddClip(&clip_middle1); // Loop through clips again, and re-check order - counter = 0; clips = t.Clips(); - for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr) - { - // Get clip object from the iterator - Clip *clip = 
(*clip_itr); - - switch (counter) { + n = 0; + for (auto clip : clips) { + switch (n) { case 0: - CHECK_EQUAL(0, clip->Layer()); + CHECK(clip->Layer() == 0); break; case 1: - CHECK_EQUAL(1, clip->Layer()); - CHECK_CLOSE(0.0, clip->Position(), 0.0001); + CHECK(clip->Layer() == 1); + CHECK(clip->Position() == Approx(0.0).margin(0.0001)); break; case 2: - CHECK_EQUAL(1, clip->Layer()); - CHECK_CLOSE(0.5, clip->Position(), 0.0001); + CHECK(clip->Layer() == 1); + CHECK(clip->Position() == Approx(0.5).margin(0.0001)); break; case 3: - CHECK_EQUAL(2, clip->Layer()); + CHECK(clip->Layer() == 2); break; } - - // increment counter - counter++; + ++n; } - // Close reader t.Close(); } -TEST(Effect_Order) +TEST_CASE( "Effect order", "[libopenshot][timeline]" ) { // Create a timeline Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); @@ -314,39 +293,27 @@ TEST(Effect_Order) effect_bottom.Layer(1); t.AddEffect(&effect_bottom); - // Open Timeline t.Open(); // Loop through effects and check order (they should have been sorted into the correct order) // Bottom layer to top layer, then by position, and then by order. 
- list::iterator effect_itr; - list effects = t.Effects(); - int counter = 0; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) - { - // Get clip object from the iterator - EffectBase *effect = (*effect_itr); - - switch (counter) { + std::list effects = t.Effects(); + int n = 0; + for (auto effect : effects) { + CHECK(effect->Layer() == n); + CHECK(effect->Order() == 0); + switch (n) { case 0: - CHECK_EQUAL(0, effect->Layer()); - CHECK_EQUAL("A", effect->Id()); - CHECK_EQUAL(0, effect->Order()); + CHECK(effect->Id() == "A"); break; case 1: - CHECK_EQUAL(1, effect->Layer()); - CHECK_EQUAL("B", effect->Id()); - CHECK_EQUAL(0, effect->Order()); + CHECK(effect->Id() == "B"); break; case 2: - CHECK_EQUAL(2, effect->Layer()); - CHECK_EQUAL("C", effect->Id()); - CHECK_EQUAL(0, effect->Order()); + CHECK(effect->Id() == "C"); break; } - - // increment counter - counter++; + ++n; } // Add some more effects out of order @@ -374,66 +341,59 @@ TEST(Effect_Order) // Loop through effects again, and re-check order effects = t.Effects(); - counter = 0; - for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr) - { - // Get clip object from the iterator - EffectBase *effect = (*effect_itr); - - switch (counter) { + n = 0; + for (auto effect : effects) { + switch (n) { case 0: - CHECK_EQUAL(0, effect->Layer()); - CHECK_EQUAL("A", effect->Id()); - CHECK_EQUAL(0, effect->Order()); + CHECK(effect->Layer() == 0); + CHECK(effect->Id() == "A"); + CHECK(effect->Order() == 0); break; case 1: - CHECK_EQUAL(1, effect->Layer()); - CHECK_EQUAL("B-1", effect->Id()); - CHECK_CLOSE(0.0, effect->Position(), 0.0001); - CHECK_EQUAL(3, effect->Order()); + CHECK(effect->Layer() == 1); + CHECK(effect->Id() == "B-1"); + CHECK(effect->Position() == Approx(0.0).margin(0.0001)); + CHECK(effect->Order() == 3); break; case 2: - CHECK_EQUAL(1, effect->Layer()); - CHECK_EQUAL("B", effect->Id()); - CHECK_CLOSE(0.0, effect->Position(), 0.0001); - CHECK_EQUAL(0, 
effect->Order()); + CHECK(effect->Layer() == 1); + CHECK(effect->Id() == "B"); + CHECK(effect->Position() == Approx(0.0).margin(0.0001)); + CHECK(effect->Order() == 0); break; case 3: - CHECK_EQUAL(1, effect->Layer()); - CHECK_EQUAL("B-2", effect->Id()); - CHECK_CLOSE(0.5, effect->Position(), 0.0001); - CHECK_EQUAL(2, effect->Order()); + CHECK(effect->Layer() == 1); + CHECK(effect->Id() == "B-2"); + CHECK(effect->Position() == Approx(0.5).margin(0.0001)); + CHECK(effect->Order() == 2); break; case 4: - CHECK_EQUAL(1, effect->Layer()); - CHECK_EQUAL("B-3", effect->Id()); - CHECK_CLOSE(0.5, effect->Position(), 0.0001); - CHECK_EQUAL(1, effect->Order()); + CHECK(effect->Layer() == 1); + CHECK(effect->Id() == "B-3"); + CHECK(effect->Position() == Approx(0.5).margin(0.0001)); + CHECK(effect->Order() == 1); break; case 5: - CHECK_EQUAL(2, effect->Layer()); - CHECK_EQUAL("C", effect->Id()); - CHECK_EQUAL(0, effect->Order()); + CHECK(effect->Layer() == 2); + CHECK(effect->Id() == "C"); + CHECK(effect->Order() == 0); break; } - - // increment counter - counter++; + ++n; } - // Close reader t.Close(); } -TEST(GetClip_by_id) +TEST_CASE( "GetClip by id", "[libopenshot][timeline]" ) { Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); - stringstream path1; + std::stringstream path1; path1 << TEST_MEDIA_PATH << "interlaced.png"; auto media_path1 = path1.str(); - stringstream path2; + std::stringstream path2; path2 << TEST_MEDIA_PATH << "front.png"; auto media_path2 = path2.str(); @@ -453,26 +413,26 @@ TEST(GetClip_by_id) // We explicitly want to get returned a Clip*, here Clip* matched = t.GetClip(clip1_id); - CHECK_EQUAL(clip1_id, matched->Id()); - CHECK_EQUAL(1, matched->Layer()); + CHECK(matched->Id() == clip1_id); + CHECK(matched->Layer() == 1); Clip* matched2 = t.GetClip(clip2_id); - CHECK_EQUAL(clip2_id, matched2->Id()); - CHECK_EQUAL(false, matched2->Layer() < 2); + CHECK(matched2->Id() == clip2_id); + CHECK_FALSE(matched2->Layer() < 2); Clip* matched3 = 
t.GetClip("BAD_ID"); - CHECK_EQUAL(true, matched3 == nullptr); + CHECK(matched3 == nullptr); // Ensure we can access the Clip API interfaces after lookup - CHECK_EQUAL(false, matched->Waveform()); - CHECK_EQUAL(true, matched2->Waveform()); + CHECK_FALSE(matched->Waveform()); + CHECK(matched2->Waveform() == true); } -TEST(GetClipEffect_by_id) +TEST_CASE( "GetClipEffect by id", "[libopenshot][timeline]" ) { Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); - stringstream path1; + std::stringstream path1; path1 << TEST_MEDIA_PATH << "interlaced.png"; auto media_path1 = path1.str(); @@ -514,21 +474,21 @@ TEST(GetClipEffect_by_id) // Check that we can look up clip1's effect auto match1 = t.GetClipEffect("EFFECT00011"); - CHECK_EQUAL(blur1_id, match1->Id()); + CHECK(match1->Id() == blur1_id); // clip2 hasn't been added yet, shouldn't be found match1 = t.GetClipEffect(blur2_id); - CHECK_EQUAL(true, match1 == nullptr); + CHECK(match1 == nullptr); t.AddClip(&clip2); // Check that blur2 can now be found via clip2 match1 = t.GetClipEffect(blur2_id); - CHECK_EQUAL(blur2_id, match1->Id()); - CHECK_EQUAL(2, match1->Layer()); + CHECK(match1->Id() == blur2_id); + CHECK(match1->Layer() == 2); } -TEST(GetEffect_by_id) +TEST_CASE( "GetEffect by id", "[libopenshot][timeline]" ) { Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); @@ -544,19 +504,19 @@ TEST(GetEffect_by_id) t.AddEffect(&blur1); auto match1 = t.GetEffect(blur1_id); - CHECK_EQUAL(blur1_id, match1->Id()); - CHECK_EQUAL(1, match1->Layer()); + CHECK(match1->Id() == blur1_id); + CHECK(match1->Layer() == 1); match1 = t.GetEffect("NOSUCHNAME"); - CHECK_EQUAL(true, match1 == nullptr); + CHECK(match1 == nullptr); } -TEST(Effect_Blur) +TEST_CASE( "Effect: Blur", "[libopenshot][timeline]" ) { // Create a timeline Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); - stringstream path_top; + std::stringstream path_top; path_top << TEST_MEDIA_PATH << "interlaced.png"; Clip 
clip_top(path_top.str()); clip_top.Layer(2); @@ -578,16 +538,19 @@ TEST(Effect_Blur) // Get frame std::shared_ptr f = t.GetFrame(1); + REQUIRE(f != nullptr); + CHECK(f->number == 1); + // Close reader t.Close(); } -TEST(GetMaxFrame_GetMaxTime) +TEST_CASE( "GetMaxFrame and GetMaxTime", "[libopenshot][timeline]" ) { // Create a timeline Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); - stringstream path1; + std::stringstream path1; path1 << TEST_MEDIA_PATH << "interlaced.png"; Clip clip1(path1.str()); clip1.Layer(1); @@ -595,8 +558,8 @@ TEST(GetMaxFrame_GetMaxTime) clip1.End(45); t.AddClip(&clip1); - CHECK_CLOSE(95.0, t.GetMaxTime(), 0.001); - CHECK_EQUAL(95 * 30 + 1, t.GetMaxFrame()); + CHECK(t.GetMaxTime() == Approx(95.0).margin(0.001)); + CHECK(t.GetMaxFrame() == 95 * 30 + 1); Clip clip2(path1.str()); clip2.Layer(2); @@ -604,16 +567,14 @@ TEST(GetMaxFrame_GetMaxTime) clip2.End(55); t.AddClip(&clip2); - CHECK_EQUAL(95 * 30 + 1, t.GetMaxFrame()); - CHECK_CLOSE(95.0, t.GetMaxTime(), 0.001); + CHECK(t.GetMaxFrame() == 95 * 30 + 1); + CHECK(t.GetMaxTime() == Approx(95.0).margin(0.001)); clip2.Position(100); clip1.Position(80); - CHECK_EQUAL(155 * 30 + 1, t.GetMaxFrame()); - CHECK_CLOSE(155.0, t.GetMaxTime(), 0.001); + CHECK(t.GetMaxFrame() == 155 * 30 + 1); + CHECK(t.GetMaxTime() == Approx(155.0).margin(0.001)); t.RemoveClip(&clip2); - CHECK_EQUAL(125 * 30 + 1, t.GetMaxFrame()); - CHECK_CLOSE(125.0, t.GetMaxTime(), 0.001); + CHECK(t.GetMaxFrame() == 125 * 30 + 1); + CHECK(t.GetMaxTime() == Approx(125.0).margin(0.001)); } - -} // SUITE diff --git a/tests/tests.cpp b/tests/catch_main.cpp similarity index 75% rename from tests/tests.cpp rename to tests/catch_main.cpp index 20d5fd331..1d97a17b0 100644 --- a/tests/tests.cpp +++ b/tests/catch_main.cpp @@ -28,23 +28,6 @@ * along with OpenShot Library. If not, see . 
*/ -#include -#include "UnitTest++.h" +#define CATCH_CONFIG_MAIN +#include -using namespace std; -using namespace UnitTest; - -int main() -{ - int exit_code = 0; - cout << "----------------------------" << endl; - cout << " RUNNING ALL TESTS" << endl; - cout << "----------------------------" << endl; - - // Run all unit tests - exit_code = RunAllTests(); - - cout << "----------------------------" << endl; - - return exit_code; -}