Adding valgrind testing to travis, and some changes to make it pass: (#334)

- `DataChunk` now always initialises its data
- Trigonometric functions no longer execute on `NULL` payloads, since the payload of a `NULL` entry may be uninitialised and evaluating it is undefined behavior (see the sketch below)
- No more `NULL` values for empty `FROM` clauses; instead, an `EmptyTableRef`/`BoundEmptyTableRef` is produced.

Issue #328
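
A hedged illustration of the second bullet follows; this is not DuckDB's vector API (`ToyVector`, `VECTOR_SIZE`, and `toy_sin` are invented names), just a minimal sketch of the problem. A slot whose nullmask bit is set may never have been written, so passing its payload to `sin()` reads indeterminate memory, which is exactly the kind of access Valgrind reports.

```cpp
#include <bitset>
#include <cmath>
#include <cstddef>

constexpr std::size_t VECTOR_SIZE = 1024; // assumed stand-in for STANDARD_VECTOR_SIZE

struct ToyVector {
    double data[VECTOR_SIZE];          // payload; NULL slots are never written
    std::bitset<VECTOR_SIZE> nullmask; // bit set => value is NULL
    std::size_t count = 0;
};

void toy_sin(const ToyVector &input, ToyVector &result) {
    result.count = input.count;
    result.nullmask = input.nullmask;
    for (std::size_t i = 0; i < input.count; i++) {
        // Only touch the payload when the slot is not NULL, so an
        // uninitialised double is never read or handed to libm.
        if (!input.nullmask[i]) {
            result.data[i] = std::sin(input.data[i]);
        }
    }
}
```
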
hannes committed Nov 5, 2019
1 parent 6d5a67d commit d9dcbdc
Showing 48 changed files with 301 additions and 61 deletions.
19 changes: 17 additions & 2 deletions .travis.yml
@@ -74,6 +74,23 @@ matrix:
- MATRIX_EVAL="CC=gcc-9 && CXX=g++-9"


- os: linux
dist: bionic
name: Valgrind

addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- valgrind

script:
- mkdir -p build/debug
- (cd build/debug && cmake -DCMAKE_BUILD_TYPE=Debug -DENABLE_SANITIZER=FALSE ../.. && cmake --build .)
- valgrind ./build/debug/test/unittest -s "[tpch]"


- os: linux
dist: xenial
name: GCC 9
@@ -347,5 +364,3 @@ matrix:

after_success:
C:/Python37-x64/python.exe tools/upload-s3.py python tools/pythonpkg/wheelhouse/*.whl


2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -133,4 +133,4 @@ if(NOT LEAN)
add_subdirectory(tools)
add_subdirectory(benchmark)
endif()
endif()
endif()
3 changes: 2 additions & 1 deletion src/common/types/chunk_collection.cpp
@@ -81,7 +81,8 @@ static int8_t templated_compare_value(Vector &left_vec, Vector &right_vec, index
return 1;
}

static int8_t compare_value(Vector &left_vec, Vector &right_vec, index_t vector_idx_left, index_t vector_idx_right) {
// return type here is int32 because strcmp() on some platforms returns rather large values
static int32_t compare_value(Vector &left_vec, Vector &right_vec, index_t vector_idx_left, index_t vector_idx_right) {

auto left_null = left_vec.nullmask[vector_idx_left];
auto right_null = right_vec.nullmask[vector_idx_right];
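
The comment added above hints at why the return type was widened: `strcmp()` is only specified to return a negative, zero, or positive `int`, and on some platforms the magnitude can exceed what `int8_t` holds. A small hedged sketch (the concrete values are hypothetical, chosen only to demonstrate the truncation) shows how narrowing such a result can collapse it to zero or flip its sign:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    // Two perfectly legal strcmp()-style results (hypothetical values).
    int cmp_a = 256; // positive: "left sorts after right"
    int cmp_b = 160; // positive as well

    // Narrowing keeps only the low 8 bits (modular since C++20,
    // implementation-defined before that); on common two's-complement
    // platforms the results are 0 and -96.
    auto narrow_a = static_cast<int8_t>(cmp_a); // 0   -> looks "equal"
    auto narrow_b = static_cast<int8_t>(cmp_b); // -96 -> sign flipped

    std::printf("%d -> %d, %d -> %d\n", cmp_a, narrow_a, cmp_b, narrow_b);
    return 0;
}
```
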
6 changes: 2 additions & 4 deletions src/common/types/data_chunk.cpp
@@ -25,7 +25,7 @@ void DataChunk::InitializeEmpty(vector<TypeId> &types) {
}
}

void DataChunk::Initialize(vector<TypeId> &types, bool zero_data) {
void DataChunk::Initialize(vector<TypeId> &types) {
assert(types.size() > 0);
InitializeEmpty(types);
index_t size = 0;
@@ -34,9 +34,7 @@ void DataChunk::Initialize(vector<TypeId> &types, bool zero_data) {
}
assert(size > 0);
owned_data = unique_ptr<data_t[]>(new data_t[size]);
if (zero_data) {
memset(owned_data.get(), 0, size);
}
memset(owned_data.get(), 0, size);

auto ptr = owned_data.get();
for (index_t i = 0; i < types.size(); i++) {
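
This is the `DataChunk` change from the first bullet of the commit message. A minimal sketch of what it boils down to, assuming `data_t` is a plain byte type (an assumption made for this sketch): `new data_t[size]` leaves the buffer indeterminate, and any later read of a slot the query never wrote, for example when a chunk is copied or compared wholesale, is what Valgrind reports as a use of uninitialised values. Zeroing unconditionally makes every byte defined.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>

using data_t = uint8_t; // assumption: a raw byte type, as in the chunk code above

std::unique_ptr<data_t[]> allocate_chunk_buffer(std::size_t size) {
    auto owned_data = std::unique_ptr<data_t[]>(new data_t[size]);
    // Always zero the freshly allocated buffer: one memset per chunk, but every
    // byte now has a defined value, so Valgrind stays quiet even for slots that
    // are never explicitly filled in.
    std::memset(owned_data.get(), 0, size);
    return owned_data;
}
```
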
1 change: 1 addition & 0 deletions src/common/types/vector.cpp
@@ -96,6 +96,7 @@ void Vector::Initialize(TypeId new_type, bool zero_data) {
if (zero_data) {
memset(data, 0, STANDARD_VECTOR_SIZE * GetTypeIdSize(type));
}
nullmask.reset();
}

void Vector::Destroy() {
16 changes: 8 additions & 8 deletions src/common/vector_operations/numeric_functions.cpp
@@ -96,55 +96,55 @@ void VectorOperations::Sin(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for SIN");
}
templated_unary_loop<double, double, duckdb::Sin>(left, result);
templated_unary_loop<double, double, duckdb::Sin, true>(left, result);
}

void VectorOperations::Cos(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for COS");
}
templated_unary_loop<double, double, duckdb::Cos>(left, result);
templated_unary_loop<double, double, duckdb::Cos, true>(left, result);
}

void VectorOperations::Tan(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for TAN");
}
templated_unary_loop<double, double, duckdb::Tan>(left, result);
templated_unary_loop<double, double, duckdb::Tan, true>(left, result);
}

void VectorOperations::ASin(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for ASIN");
}
templated_unary_loop<double, double, duckdb::ASin>(left, result);
templated_unary_loop<double, double, duckdb::ASin, true>(left, result);
}

void VectorOperations::ACos(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for ACOS");
}
templated_unary_loop<double, double, duckdb::ACos>(left, result);
templated_unary_loop<double, double, duckdb::ACos, true>(left, result);
}

void VectorOperations::ATan(Vector &left, Vector &result) {
if (left.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for ACOS");
}
templated_unary_loop<double, double, duckdb::ATan>(left, result);
templated_unary_loop<double, double, duckdb::ATan, true>(left, result);
}

void VectorOperations::ATan2(Vector &left, Vector &right, Vector &result) {
if (left.type != TypeId::DOUBLE || right.type != TypeId::DOUBLE) {
throw InvalidTypeException(left.type, "Invalid type for ATAN2");
}
templated_binary_loop<double, double, double, duckdb::ATan2>(left, right, result);
templated_binary_loop<double, double, double, duckdb::ATan2, true>(left, right, result);
}

void VectorOperations::Sign(Vector &input, Vector &result) {
unary_numeric_op_tintret<duckdb::Sign>(input, result);
}

void VectorOperations::Pow(Vector &base, Vector &exponent, Vector &result) {
templated_binary_loop<double, double, double, duckdb::Pow>(base, exponent, result);
templated_binary_loop<double, double, double, duckdb::Pow, true>(base, exponent, result);
}
4 changes: 2 additions & 2 deletions src/execution/aggregate_hashtable.cpp
@@ -82,7 +82,7 @@ void SuperLargeHashTable::Resize(index_t size) {
auto new_table = make_unique<SuperLargeHashTable>(size, group_types, payload_types, aggregates, parallel);

DataChunk groups;
groups.Initialize(group_types, false);
groups.Initialize(group_types);

Vector addresses(TypeId::POINTER, true, false);
auto data_pointers = (data_ptr_t *)addresses.data;
@@ -176,7 +176,7 @@ void SuperLargeHashTable::AddChunk(DataChunk &groups, DataChunk &payload) {
vector<TypeId> probe_types(group_types);
probe_types.push_back(payload_types[payload_idx]);
DataChunk probe_chunk;
probe_chunk.Initialize(probe_types, false);
probe_chunk.Initialize(probe_types);
for (index_t group_idx = 0; group_idx < group_types.size(); group_idx++) {
probe_chunk.data[group_idx].Reference(groups.data[group_idx]);
}
3 changes: 2 additions & 1 deletion src/include/common/enums/tableref_type.hpp
@@ -21,7 +21,8 @@ enum class TableReferenceType : uint8_t {
SUBQUERY = 2, // output of a subquery
JOIN = 3, // output of join
CROSS_PRODUCT = 4, // output of cartesian product
TABLE_FUNCTION = 5 // table producing function
TABLE_FUNCTION = 5, // table producing function
EMPTY = 6 // placeholder for empty FROM
};

} // namespace duckdb
3 changes: 1 addition & 2 deletions src/include/common/types/data_chunk.hpp
@@ -62,8 +62,7 @@ class DataChunk {
//! This will create one vector of the specified type for each TypeId in the
//! types list. The vector will be referencing vector to the data owned by
//! the DataChunk.
//! If zero_data is set to true, the data is zero-initialized.
void Initialize(vector<TypeId> &types, bool zero_data = false);
void Initialize(vector<TypeId> &types);
//! Initializes an empty DataChunk with the given types. The vectors will *not* have any data allocated for them.
void InitializeEmpty(vector<TypeId> &types);
//! Append the other DataChunk to this one. The column count and types of
14 changes: 9 additions & 5 deletions src/include/common/vector_operations/unary_loops.hpp
@@ -20,18 +20,22 @@ inline void UNARY_TYPE_CHECK(Vector &input, Vector &result) {
}
}

template <class LEFT_TYPE, class RESULT_TYPE, class OP>
template <class LEFT_TYPE, class RESULT_TYPE, class OP, bool IGNORE_NULL=false>
static inline void unary_loop_function(LEFT_TYPE *__restrict ldata, RESULT_TYPE *__restrict result_data, index_t count,
sel_t *__restrict sel_vector) {
sel_t *__restrict sel_vector, nullmask_t nullmask) {
ASSERT_RESTRICT(ldata, ldata + count, result_data, result_data + count);
VectorOperations::Exec(sel_vector, count, [&](index_t i, index_t k) { result_data[i] = OP::Operation(ldata[i]); });
VectorOperations::Exec(sel_vector, count, [&](index_t i, index_t k) {
if (!IGNORE_NULL || !nullmask[i]) {
result_data[i] = OP::Operation(ldata[i]);
}
});
}

template <class LEFT_TYPE, class RESULT_TYPE, class OP> void templated_unary_loop(Vector &input, Vector &result) {
template <class LEFT_TYPE, class RESULT_TYPE, class OP, bool IGNORE_NULL=false> void templated_unary_loop(Vector &input, Vector &result) {
auto ldata = (LEFT_TYPE *)input.data;
auto result_data = (RESULT_TYPE *)result.data;

unary_loop_function<LEFT_TYPE, RESULT_TYPE, OP>(ldata, result_data, input.count, input.sel_vector);
unary_loop_function<LEFT_TYPE, RESULT_TYPE, OP, IGNORE_NULL>(ldata, result_data, input.count, input.sel_vector, input.nullmask);
result.nullmask = input.nullmask;
result.sel_vector = input.sel_vector;
result.count = input.count;
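
For reference, a standalone sketch of the template-flag pattern used above; the names (`apply_unary`, `Nullmask`, `Negate`) are illustrative and not DuckDB's. Because `IGNORE_NULL` is a compile-time constant, the per-element null check only exists in the instantiations that request it, while the default `IGNORE_NULL = false` instantiation keeps the original unguarded loop.

```cpp
#include <bitset>
#include <cstddef>

using Nullmask = std::bitset<1024>; // assumed stand-in for nullmask_t

template <class INPUT, class RESULT, class OP, bool IGNORE_NULL = false>
void apply_unary(const INPUT *input, RESULT *result, std::size_t count, const Nullmask &nullmask) {
    for (std::size_t i = 0; i < count; i++) {
        // IGNORE_NULL is known at compile time: when it is false, the condition
        // folds to true and the null check disappears from the generated code.
        if (!IGNORE_NULL || !nullmask[i]) {
            result[i] = OP::Operation(input[i]);
        }
    }
}

struct Negate {
    static double Operation(double x) { return -x; }
};

// apply_unary<double, double, Negate, true>(in, out, n, mask)  skips NULL slots;
// apply_unary<double, double, Negate>(in, out, n, mask)        is the unguarded loop.
```
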
@@ -34,7 +34,7 @@ class PhysicalBlockwiseNLJoin : public PhysicalJoin {
class PhysicalBlockwiseNLJoinState : public PhysicalOperatorState {
public:
PhysicalBlockwiseNLJoinState(PhysicalOperator *left, PhysicalOperator *right)
: PhysicalOperatorState(left), left_position(0), right_position(0), fill_in_rhs(false) {
: PhysicalOperatorState(left), left_position(0), right_position(0), fill_in_rhs(false), checked_found_match(false) {
assert(left && right);
}

30 changes: 30 additions & 0 deletions src/include/parser/tableref/emptytableref.hpp
@@ -0,0 +1,30 @@
//===----------------------------------------------------------------------===//
// DuckDB
//
// parser/tableref/emptytableref.hpp
//
//
//===----------------------------------------------------------------------===//

#pragma once

#include "parser/tableref.hpp"

namespace duckdb {
//! Represents a table reference for a query without a FROM clause
class EmptyTableRef : public TableRef {
public:
EmptyTableRef() : TableRef(TableReferenceType::EMPTY) {
}

public:
bool Equals(const TableRef *other_) const override;

unique_ptr<TableRef> Copy() override;

//! Serializes a blob into an EmptyTableRef
void Serialize(Serializer &serializer) override;
//! Deserializes a blob back into an EmptyTableRef
static unique_ptr<TableRef> Deserialize(Deserializer &source);
};
} // namespace duckdb
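
The new class is essentially a null object: instead of representing a query without a `FROM` clause (such as `SELECT 1;`) with a null `TableRef` pointer that every consumer must remember to check, the parser now always hands out a node the binder can dispatch on. A hedged sketch of that idea, not DuckDB's actual class hierarchy (`Ref`, `EmptyRef`, and `parse_from_clause` are invented for illustration):

```cpp
#include <memory>

enum class RefType { BASE_TABLE, EMPTY };

struct Ref {
    explicit Ref(RefType type) : type(type) {}
    virtual ~Ref() = default;
    RefType type;
};

struct EmptyRef : Ref {
    EmptyRef() : Ref(RefType::EMPTY) {}
};

// The parser never returns nullptr: an absent FROM clause becomes a sentinel
// node that downstream code handles like any other table reference.
std::unique_ptr<Ref> parse_from_clause(bool has_from) {
    if (!has_from) {
        return std::make_unique<EmptyRef>();
    }
    return std::make_unique<Ref>(RefType::BASE_TABLE); // placeholder for a real base table ref
}
```
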
1 change: 1 addition & 0 deletions src/include/parser/tableref/list.hpp
@@ -1,5 +1,6 @@
#include "parser/tableref/basetableref.hpp"
#include "parser/tableref/crossproductref.hpp"
#include "parser/tableref/emptytableref.hpp"
#include "parser/tableref/joinref.hpp"
#include "parser/tableref/subqueryref.hpp"
#include "parser/tableref/table_function_ref.hpp"
1 change: 1 addition & 0 deletions src/include/parser/tokens.hpp
@@ -77,5 +77,6 @@ class CrossProductRef;
class JoinRef;
class SubqueryRef;
class TableFunctionRef;
class EmptyTableRef;

} // namespace duckdb
1 change: 1 addition & 0 deletions src/include/planner/binder.hpp
@@ -117,6 +117,7 @@ class Binder {
unique_ptr<BoundTableRef> Bind(JoinRef &ref);
unique_ptr<BoundTableRef> Bind(SubqueryRef &ref);
unique_ptr<BoundTableRef> Bind(TableFunctionRef &ref);
unique_ptr<BoundTableRef> Bind(EmptyTableRef &ref);
};

} // namespace duckdb
1 change: 1 addition & 0 deletions src/include/planner/bound_tokens.hpp
@@ -62,5 +62,6 @@ class BoundCrossProductRef;
class BoundJoinRef;
class BoundSubqueryRef;
class BoundTableFunction;
class BoundEmptyTableRef;

} // namespace duckdb
2 changes: 2 additions & 0 deletions src/include/planner/logical_plan_generator.hpp
@@ -50,6 +50,8 @@ class LogicalPlanGenerator {
unique_ptr<LogicalOperator> CreatePlan(BoundJoinRef &ref);
unique_ptr<LogicalOperator> CreatePlan(BoundSubqueryRef &ref);
unique_ptr<LogicalOperator> CreatePlan(BoundTableFunction &ref);
unique_ptr<LogicalOperator> CreatePlan(BoundEmptyTableRef &ref);


void PlanSubqueries(unique_ptr<Expression> *expr, unique_ptr<LogicalOperator> *root);

2 changes: 1 addition & 1 deletion src/include/planner/operator/logical_get.hpp
@@ -16,7 +16,7 @@ namespace duckdb {
//! LogicalGet represents a scan operation from a data source
class LogicalGet : public LogicalOperator {
public:
LogicalGet() : LogicalOperator(LogicalOperatorType::GET), table(nullptr) {
LogicalGet(index_t table_index) : LogicalOperator(LogicalOperatorType::GET), table(nullptr), table_index(table_index) {
}
LogicalGet(TableCatalogEntry *table, index_t table_index, vector<column_t> column_ids)
: LogicalOperator(LogicalOperatorType::GET), table(table), table_index(table_index), column_ids(column_ids) {
22 changes: 22 additions & 0 deletions src/include/planner/tableref/bound_dummytableref.hpp
@@ -0,0 +1,22 @@
//===----------------------------------------------------------------------===//
// DuckDB
//
// planner/tableref/bound_dummytableref.hpp
//
//
//===----------------------------------------------------------------------===//

#pragma once

#include "planner/bound_tableref.hpp"

namespace duckdb {

//! Represents a bound table reference for a query without a FROM clause
class BoundEmptyTableRef : public BoundTableRef {
public:
BoundEmptyTableRef(index_t bind_index) : BoundTableRef(TableReferenceType::EMPTY), bind_index(bind_index) {
}
index_t bind_index;
};
} // namespace duckdb
1 change: 1 addition & 0 deletions src/include/planner/tableref/list.hpp
@@ -1,5 +1,6 @@
#include "planner/tableref/bound_basetableref.hpp"
#include "planner/tableref/bound_crossproductref.hpp"
#include "planner/tableref/bound_dummytableref.hpp"
#include "planner/tableref/bound_joinref.hpp"
#include "planner/tableref/bound_subqueryref.hpp"
#include "planner/tableref/bound_table_function.hpp"
2 changes: 1 addition & 1 deletion src/include/transaction/transaction_context.hpp
@@ -20,7 +20,7 @@ class TransactionManager;
class TransactionContext {
public:
TransactionContext(TransactionManager &transaction_manager)
: transaction_manager(transaction_manager), auto_commit(true), current_transaction(nullptr) {
: transaction_manager(transaction_manager), auto_commit(true), is_invalidated(false), current_transaction(nullptr) {
}
~TransactionContext();

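
This change and the `PhysicalBlockwiseNLJoinState` change above fix the same pattern: a member missing from the constructor's initialiser list holds an indeterminate value, and the first branch on it is what Valgrind reports as "Conditional jump or move depends on uninitialised value(s)". A minimal before/after sketch with made-up struct names:

```cpp
struct BadState {
    BadState() : position(0) {} // is_invalidated forgotten: its value is indeterminate
    int position;
    bool is_invalidated;
};

struct GoodState {
    GoodState() : position(0), is_invalidated(false) {} // every member initialised
    int position;
    bool is_invalidated;
};

// With BadState, "if (state.is_invalidated)" reads an indeterminate bool;
// with GoodState the same branch is well-defined and Valgrind stays quiet.
```
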
3 changes: 3 additions & 0 deletions src/parser/tableref.cpp
@@ -33,6 +33,9 @@ unique_ptr<TableRef> TableRef::Deserialize(Deserializer &source) {
case TableReferenceType::TABLE_FUNCTION:
result = TableFunctionRef::Deserialize(source);
break;
case TableReferenceType::EMPTY:
result = EmptyTableRef::Deserialize(source);
break;
case TableReferenceType::INVALID:
return nullptr;
}
1 change: 1 addition & 0 deletions src/parser/tableref/CMakeLists.txt
@@ -2,6 +2,7 @@ add_library_unity(duckdb_parser_tableref
OBJECT
basetableref.cpp
crossproductref.cpp
emptytableref.cpp
joinref.cpp
subqueryref.cpp
table_function.cpp)
22 changes: 22 additions & 0 deletions src/parser/tableref/emptytableref.cpp
@@ -0,0 +1,22 @@
#include "parser/tableref/emptytableref.hpp"

#include "common/serializer.hpp"

using namespace duckdb;
using namespace std;

bool EmptyTableRef::Equals(const TableRef *other_) const {
return true; // EmptyTableRefs carry no state beyond their type, so any two are equal
}

unique_ptr<TableRef> EmptyTableRef::Copy() {
return make_unique<EmptyTableRef>();
}

void EmptyTableRef::Serialize(Serializer &serializer) {
TableRef::Serialize(serializer);
}

unique_ptr<TableRef> EmptyTableRef::Deserialize(Deserializer &source) {
return make_unique<EmptyTableRef>();
}
2 changes: 0 additions & 2 deletions src/parser/transform/expression/transform_operator.cpp
@@ -73,12 +73,10 @@ unique_ptr<ParsedExpression> Transformer::TransformAExpr(postgres::A_Expr *root)
if (!root) {
return nullptr;
}
ExpressionType target_type;
auto name = string((reinterpret_cast<postgres::Value *>(root->name->head->data.ptr_value))->val.str);

switch (root->kind) {
case postgres::AEXPR_DISTINCT:
target_type = ExpressionType::COMPARE_DISTINCT_FROM;
break;
case postgres::AEXPR_IN: {
auto left_expr = TransformExpression(root->lexpr);
