Skip to content

Commit

Permalink
Merge branch 'master' into vectortypes
Browse files Browse the repository at this point in the history
  • Loading branch information
Mytherin committed Feb 4, 2020
2 parents 5d8ff90 + 3391be8 commit 90ad9e7
Show file tree
Hide file tree
Showing 106 changed files with 32,847 additions and 13,987 deletions.
35 changes: 25 additions & 10 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,10 @@ matrix:
script:
- (mkdir -p build/release && cd build/release && cmake -DCMAKE_BUILD_TYPE=Release ../.. && cmake --build .)
- build/release/test/unittest
- python3 tools/shell/shell-test.py build/release/duckdb_cli

after_success:
- python tools/upload-s3.py lib-osx build/release/src/libduckdb.dylib build/release/src/libduckdb_static.a
- python tools/upload-s3.py lib-osx build/release/src/libduckdb.dylib build/release/src/libduckdb_static.a build/release/duckdb_cli


- os: linux
Expand Down Expand Up @@ -110,15 +111,17 @@ matrix:


- os: linux
dist: xenial
dist: bionic
name: GCC 9
python: 3.7

addons:
apt:
sources:
- ubuntu-toolchain-r-test
- sourceline: 'ppa:ubuntu-toolchain-r/test'
- sourceline: 'ppa:deadsnakes/ppa'
packages:
- g++-9
- g++-9 python3.7
env:
- MATRIX_EVAL="CC=gcc-9 && CXX=g++-9"
before_install:
Expand All @@ -128,12 +131,14 @@ matrix:
- mkdir -p build/release
- (cd build/release && cmake -DCMAKE_BUILD_TYPE=Release ../.. && cmake --build .)
- build/release/test/unittest "*"
- python3.7 tools/shell/shell-test.py build/release/duckdb_cli
- (cd examples/embedded-c; make)
- (cd examples/embedded-c++; make)
- (cd examples/programmatic-querying; make)


after_success:
- python tools/upload-s3.py lib-linux-amd64 build/release/src/libduckdb*.so build/release/src/libduckdb*.a
- python tools/upload-s3.py lib-linux-amd64 build/release/src/libduckdb*.so build/release/src/libduckdb*.a build/release/duckdb_cli


- os: linux
Expand All @@ -158,7 +163,7 @@ matrix:
- build/release/test/unittest "*"

after_success:
- python tools/upload-s3.py lib-linux-i386 build/release/src/libduckdb*.so build/release/src/libduckdb*.a
- python tools/upload-s3.py lib-linux-i386 build/release/src/libduckdb*.so build/release/src/libduckdb*.a build/release/duckdb_cli


- os: linux
Expand Down Expand Up @@ -207,10 +212,12 @@ matrix:
- cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=x64
- cmake --build . --target duckdb --config Release
- cmake --build . --target unittest --config Release
- cmake --build . --target shell --config Release
- test/Release/unittest.exe
- C:/Python37-x64/python.exe tools/shell/shell-test.py Release/duckdb_cli.exe

after_success:
- C:/Python37-x64/python.exe tools/upload-s3.py lib-windows-amd64 src/Release/duckdb.dll
- C:/Python37-x64/python.exe tools/upload-s3.py lib-windows-amd64 src/Release/duckdb.dll Release/duckdb_cli.exe


- os: windows
Expand All @@ -227,10 +234,12 @@ matrix:
- cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_GENERATOR_PLATFORM=Win32
- cmake --build . --target duckdb --config Release
- cmake --build . --target unittest --config Release
- cmake --build . --target shell --config Release
- test/Release/unittest.exe
- C:/Python37-x64/python.exe tools/shell/shell-test.py Release/duckdb_cli.exe

after_success:
- C:/Python37-x64/python.exe tools/upload-s3.py lib-windows-i386 src/Release/duckdb.dll
- C:/Python37-x64/python.exe tools/upload-s3.py lib-windows-i386 src/Release/duckdb.dll Release/duckdb_cli.exe


- os: linux
Expand All @@ -256,6 +265,8 @@ matrix:
- R -f ../../examples/R/dplyr.R

after_success:
- R -e "tools::write_PACKAGES(dir = '.', type = 'source')"
- python ../upload-s3.py rstats/src/contrib duckdb_*.tar.gz PACKAGES*
- python ../upload-s3.py rstats duckdb_*.tar.gz


Expand All @@ -281,6 +292,8 @@ matrix:
- (cd tests && R -f testthat.R)

after_success:
- R -e "tools::write_PACKAGES(dir = '.', type = 'mac.binary')"
- python ../upload-s3.py rstats/bin/macosx/el-capitan/contrib/3.6 duckdb_*.tgz PACKAGES*
- python ../upload-s3.py rstats duckdb_*.tgz


Expand Down Expand Up @@ -309,6 +322,8 @@ matrix:

after_success:
- C:/Python37-x64/python.exe ../upload-s3.py rstats duckdb_*.zip
- C:/Program\ Files/R/R-3.6.0/bin/R.exe -e "tools::write_PACKAGES(dir = '.', type = 'win.binary')"
- C:/Python37-x64/python.exe ../upload-s3.py rstats/bin/windows/contrib/3.6 duckdb_*.zip PACKAGES*


- os: linux
Expand Down Expand Up @@ -380,8 +395,8 @@ matrix:
before_install:
- choco install python3 --version 3.7.3 --sidebyside -y --forcex86 --force --params "/InstallDir:C:\Python37"
- choco install python3 --version 3.7.3 --sidebyside -y --force --params "/InstallDir:C:\Python37-x64"
# - choco install python3 --version 3.6.8 --sidebyside -y --forcex86 --force --params "/InstallDir:C:\Python36"
# - choco install python3 --version 3.6.8 --sidebyside -y --force --params "/InstallDir:C:\Python36-x64"
- choco install python3 --version 3.6.8 --sidebyside -y --force --params "/InstallDir:C:\Python36-x64"
- choco install python3 --version 3.6.8 --sidebyside -y --forcex86 --force --params "/InstallDir:C:\Python36"
- choco install python2 --version 2.7.16 --sidebyside -y --forcex86 --force --params "/InstallDir:C:\Python27"
- choco install python2 --version 2.7.16 --sidebyside -y --force --params "/InstallDir:C:\Python27-x64"
- choco install curl -y --force
Expand Down
12 changes: 11 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,16 @@ if(${EXPLICIT_EXCEPTIONS})
set(CXX_EXTRA "${CXX_EXTRA} -fexceptions")
endif()

execute_process(COMMAND git
log
-1
--format=%h
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE GIT_COMMIT_HASH
OUTPUT_STRIP_TRAILING_WHITESPACE)
add_definitions(-DDUCKDB_SOURCE_ID="\""${GIT_COMMIT_HASH}"\"")


option(AMALGAMATION_BUILD
"Build from the amalgamation files, rather than from the normal sources."
FALSE)
Expand Down Expand Up @@ -201,8 +211,8 @@ add_subdirectory(third_party)

if(NOT LEAN)
add_subdirectory(test)
add_subdirectory(tools)
if(NOT WIN32)
add_subdirectory(tools)
add_subdirectory(benchmark)
endif()
endif()
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ test_compile: # test compilation of individual cpp files
python scripts/amalgamation.py --compile

format:
python scripts/format.py
python3 scripts/format.py

third_party/sqllogictest:
git clone --depth=1 https://github.com/cwida/sqllogictest.git third_party/sqllogictest
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ DuckDB requires [CMake](https://cmake.org) to be installed and a `C++11` complia
Run `make` in the root directory to compile the sources. For development, use `make debug` to build a non-optimized debug version. You may run `make unit` and `make allunit` to verify that your version works properly after making changes.

# Usage
A command line utility based on `sqlite3` can be found in either `build/release/tools/shell/shell` (release, the default) or `build/debug/tools/shell/shell` (debug).
A command line utility based on `sqlite3` can be found in either `build/release/duckdb_cli` (release, the default) or `build/debug/duckdb_cli` (debug).

# Embedding
As DuckDB is an embedded database, there is no database server to launch or client to connect to a running server. However, the database server can be embedded directly into an application using the C or C++ bindings. The main build process creates the shared library `build/release/src/libduckdb.[so|dylib|dll]` that can be linked against. A static library is built as well.
Expand Down
3 changes: 2 additions & 1 deletion benchmark/expression_reordering/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
include_directories(../../third_party/dbgen/include)
include_directories(../../third_party/sqlite/include)
add_library(duckdb_benchmark_expression_reordering OBJECT
benchmark_expressions.cpp)
benchmark_expressions.cpp
adaptive_reordering.cpp)
set(BENCHMARK_OBJECT_FILES ${BENCHMARK_OBJECT_FILES}
$<TARGET_OBJECTS:duckdb_benchmark_expression_reordering> PARENT_SCOPE)
128 changes: 128 additions & 0 deletions benchmark/expression_reordering/adaptive_reordering.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
#include "benchmark_runner.hpp"
#include "compare_result.hpp"
#include "dbgen.hpp"
#include "duckdb_benchmark_macro.hpp"

using namespace duckdb;
using namespace std;

#define SF 1

// Benchmark: adaptive runtime reordering of conjunctive (AND) string predicates.
// Three LIKE filters on the same column with (presumably) very different
// selectivity/cost give the expression executor room to reorder them.
DUCKDB_BENCHMARK(AdaptiveStringReorderingAND, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
// LIKE '%' matches every non-NULL comment; '%s%' and '%str%' are
// progressively narrower patterns, so evaluation order matters.
return "SELECT * FROM lineitem WHERE l_comment LIKE '%' AND l_comment LIKE '%s%' AND l_comment LIKE '%str%';";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveStringReorderingAND)


// Benchmark: same three LIKE predicates as the AND variant, but combined with
// OR — short-circuiting favors cheap, highly-matching predicates first.
DUCKDB_BENCHMARK(AdaptiveStringReorderingOR, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
return "SELECT * FROM lineitem WHERE l_comment LIKE '%' OR l_comment LIKE '%s%' OR l_comment LIKE '%str%';";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveStringReorderingOR)


// Benchmark: adaptive reordering over four AND-ed numeric comparisons on
// different lineitem columns (quantity, dates, tax).
DUCKDB_BENCHMARK(AdaptiveNumericReorderingAND, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
return "SELECT * FROM lineitem WHERE l_quantity < 11 AND l_shipdate < 727272 AND l_receiptdate < 828282 AND l_tax < 0.05;";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveNumericReorderingAND)


// Benchmark: OR variant of the numeric reordering query — same four
// comparisons, disjunctive combination.
DUCKDB_BENCHMARK(AdaptiveNumericReorderingOR, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
return "SELECT * FROM lineitem WHERE l_quantity < 11 OR l_shipdate < 727272 OR l_receiptdate < 828282 OR l_tax < 0.05;";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveNumericReorderingOR)


// Benchmark: adaptive reordering with mixed predicate types — a string
// equality plus two numeric comparisons, combined with AND.
DUCKDB_BENCHMARK(AdaptiveMixedReorderingAND, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
return "SELECT * FROM lineitem WHERE l_returnflag = 'R' AND l_orderkey > 5000 AND l_shipdate > 5;";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveMixedReorderingAND)


// Benchmark: OR variant of the mixed-type reordering query — same string and
// numeric predicates, disjunctive combination.
DUCKDB_BENCHMARK(AdaptiveMixedReorderingOR, "[expression_reordering]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
tpch::dbgen(SF, state->db);
}
string GetQuery() override {
return "SELECT * FROM lineitem WHERE l_returnflag = 'R' OR l_orderkey > 5000 OR l_shipdate > 5;";
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute adaptive reordering query ...";
}
FINISH_BENCHMARK(AdaptiveMixedReorderingOR)
4 changes: 4 additions & 0 deletions benchmark/imdb/imdb.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -475,3 +475,7 @@ FINISH_BENCHMARK(IMDBQ112);
DUCKDB_BENCHMARK(IMDBQ113, "[imdb]")
IMDB_QUERY_BODY(113);
FINISH_BENCHMARK(IMDBQ113);

// IMDB (join order) benchmark, query 114; the body is generated by the
// IMDB_QUERY_BODY macro like the preceding IMDBQ* registrations.
DUCKDB_BENCHMARK(IMDBQ114, "[imdb]")
IMDB_QUERY_BODY(114);
FINISH_BENCHMARK(IMDBQ114);
25 changes: 25 additions & 0 deletions benchmark/tpch/lineitem_aggregate.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -131,3 +131,28 @@ string BenchmarkInfo() override {
return "Execute the query \"" + GetQuery() + "\" on SF1";
}
FINISH_BENCHMARK(LineitemJoinAggregateWithFilter)

// TPC-H Q1 variant where the group-by key columns l_returnflag and
// l_linestatus are re-encoded as small integers ('N'/'R'/'A' -> 0/1/2 and
// 'F'/'O' -> 0/1), to compare aggregation over integer vs. string keys.
DUCKDB_BENCHMARK(TPCHQ1IntKeys, "[aggregate]")
void Load(DuckDBBenchmarkState *state) override {
// load the data into the tpch schema
// the generated table gets a "_normal" suffix so the re-encoded copy
// below can take the canonical "lineitem" name
tpch::dbgen(SF, state->db, DEFAULT_SCHEMA, "_normal");
Connection conn(state->db);
// rebuild lineitem with the two flag columns mapped to integer codes;
// unmapped values become NULL
conn.Query("CREATE TABLE lineitem AS select l_orderkey, l_partkey, l_suppkey, l_linenumber, l_quantity, "
"l_extendedprice, l_discount, l_tax, case l_returnflag when 'N' then 0 when 'R' then 1 when 'A' "
"then 2 else NULL end l_returnflag, case l_linestatus when 'F' then 0 when 'O' then 1 else NULL end "
"l_linestatus, l_shipdate, l_commitdate, l_receiptdate, l_shipinstruct, l_shipmode, "
"l_comment from lineitem_normal");
}
string GetQuery() override {
return tpch::get_query(1);
}
string VerifyResult(QueryResult *result) override {
// only verify the query executed successfully; row contents are not checked
if (!result->success) {
return result->error;
}
return string();
}
string BenchmarkInfo() override {
return "Execute the query \"" + GetQuery() + "\" on SF1";
}
FINISH_BENCHMARK(TPCHQ1IntKeys)
5 changes: 4 additions & 1 deletion src/catalog/catalog_entry/schema_catalog_entry.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,10 @@ void SchemaCatalogEntry::DropSequence(Transaction &transaction, DropInfo *info)

bool SchemaCatalogEntry::CreateIndex(Transaction &transaction, CreateIndexInfo *info) {
auto index = make_unique_base<CatalogEntry, IndexCatalogEntry>(catalog, this, info);
unordered_set<CatalogEntry *> dependencies{this};
unordered_set<CatalogEntry *> dependencies;
if (name != TEMP_SCHEMA) {
dependencies.insert(this);
}
if (!indexes.CreateEntry(transaction, info->index_name, move(index), dependencies)) {
if (!info->if_not_exists) {
throw CatalogException("Index with name \"%s\" already exists!", info->index_name.c_str());
Expand Down
2 changes: 1 addition & 1 deletion src/catalog/dependency_manager.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ void DependencyManager::AddObject(Transaction &transaction, CatalogEntry *object
throw TransactionException("Catalog write-write conflict on create with \"%s\"", object->name.c_str());
}
}
// add the object to the dependents_map of each object that it depents on
// add the object to the dependents_map of each object that it depends on
for (auto &dependency : dependencies) {
dependents_map[dependency].insert(object);
}
Expand Down
4 changes: 4 additions & 0 deletions src/common/enums/logical_operator_type.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,10 @@ string LogicalOperatorToString(LogicalOperatorType type) {
return "EXECUTE";
case LogicalOperatorType::INDEX_SCAN:
return "INDEX_SCAN";
case LogicalOperatorType::RECURSIVE_CTE:
return "REC_CTE";
case LogicalOperatorType::CTE_REF:
return "CTE_SCAN";
case LogicalOperatorType::INVALID:
default:
return "INVALID";
Expand Down
2 changes: 2 additions & 0 deletions src/common/enums/physical_operator_type.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ string PhysicalOperatorToString(PhysicalOperatorType type) {
return "EXPLAIN";
case PhysicalOperatorType::EXECUTE:
return "EXECUTE";
case PhysicalOperatorType::RECURSIVE_CTE:
return "REC_CTE";
case PhysicalOperatorType::INVALID:
default:
return "INVALID";
Expand Down
Loading

0 comments on commit 90ad9e7

Please sign in to comment.