Skip to content
This repository has been archived by the owner on Feb 26, 2020. It is now read-only.

Commit

Permalink
Experiments on column-aware encodings
Browse files Browse the repository at this point in the history
Summary:
Experiments on column-aware encodings. Supported features: 1) extract data blocks from SST file and encode with specified encodings; 2) Decode encoded data back into row format; 3) Directly extract data blocks and write in row format (without prefix encoding); 4) Get column distribution statistics for column format; 5) Dump data blocks separated by columns in human-readable format.

There is still ongoing work on this diff. More refactoring is necessary.

Test Plan: Wrote tests in `column_aware_encoding_test.cc`. More tests should be added.

Reviewers: sdong

Reviewed By: sdong

Subscribers: arahut, andrewkr, dhruba

Differential Revision: https://reviews.facebook.net/D60027
  • Loading branch information
omegaga committed Aug 1, 2016
1 parent c116b47 commit d51dc96
Show file tree
Hide file tree
Showing 16 changed files with 1,817 additions and 7 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ build/
ldb
manifest_dump
sst_dump
column_aware_encoding_exp
util/build_version.cc
build_tools/VALGRIND_LOGS/
coverage/COVERAGE_REPORT
Expand Down
5 changes: 5 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -282,6 +282,9 @@ set(SOURCES
utilities/ttl/db_ttl_impl.cc
utilities/write_batch_with_index/write_batch_with_index.cc
utilities/write_batch_with_index/write_batch_with_index_internal.cc
utilities/col_buf_encoder.cc
utilities/col_buf_decoder.cc
utilities/column_aware_encoding_util.cc
)

# For test util library that is build only in DEBUG mode
Expand Down Expand Up @@ -327,6 +330,7 @@ set(APPS
tools/dump/rocksdb_undump.cc
util/cache_bench.cc
utilities/persistent_cache/hash_table_bench.cc
utilities/column_aware_encoding_exp.cc
)

set(C_TESTS db/c_test.c)
Expand Down Expand Up @@ -442,6 +446,7 @@ set(TESTS
utilities/transactions/transaction_test.cc
utilities/ttl/ttl_test.cc
utilities/write_batch_with_index/write_batch_with_index_test.cc
utilities/column_aware_encoding_test.cc
)

set(EXES ${APPS})
Expand Down
13 changes: 11 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -268,6 +268,8 @@ VALGRIND_OPTS = --error-exitcode=$(VALGRIND_ERROR) --leak-check=full

BENCHTOOLOBJECTS = $(BENCH_LIB_SOURCES:.cc=.o) $(LIBOBJECTS) $(TESTUTIL)

EXPOBJECTS = $(EXP_LIB_SOURCES:.cc=.o) $(LIBOBJECTS) $(TESTUTIL)

TESTS = \
db_test \
db_test2 \
Expand Down Expand Up @@ -367,6 +369,7 @@ TESTS = \
compaction_job_test \
thread_list_test \
sst_dump_test \
column_aware_encoding_test \
compact_files_test \
perf_context_test \
optimistic_transaction_test \
Expand Down Expand Up @@ -416,7 +419,7 @@ TEST_LIBS = \
librocksdb_env_basic_test.a

# TODO: add back forward_iterator_bench, after making it build in all environments.
BENCHMARKS = db_bench table_reader_bench cache_bench memtablerep_bench
BENCHMARKS = db_bench table_reader_bench cache_bench memtablerep_bench column_aware_encoding_exp

# if user didn't config LIBNAME, set the default
ifeq ($(LIBNAME),)
Expand Down Expand Up @@ -1176,6 +1179,9 @@ event_logger_test: util/event_logger_test.o $(LIBOBJECTS) $(TESTHARNESS)
sst_dump_test: tools/sst_dump_test.o $(LIBOBJECTS) $(TESTHARNESS)
$(AM_LINK)

column_aware_encoding_test: utilities/column_aware_encoding_test.o $(TESTHARNESS) $(EXPOBJECTS)
$(AM_LINK)

optimistic_transaction_test: utilities/transactions/optimistic_transaction_test.o $(LIBOBJECTS) $(TESTHARNESS)
$(AM_LINK)

Expand Down Expand Up @@ -1206,6 +1212,9 @@ transaction_test: utilities/transactions/transaction_test.o $(LIBOBJECTS) $(TEST
sst_dump: tools/sst_dump.o $(LIBOBJECTS)
$(AM_LINK)

column_aware_encoding_exp: utilities/column_aware_encoding_exp.o $(EXPOBJECTS)
$(AM_LINK)

repair_test: db/repair_test.o db/db_test_util.o $(LIBOBJECTS) $(TESTHARNESS)
$(AM_LINK)

Expand Down Expand Up @@ -1440,7 +1449,7 @@ endif
# Source files dependencies detection
# ---------------------------------------------------------------------------

all_sources = $(LIB_SOURCES) $(MAIN_SOURCES) $(MOCK_LIB_SOURCES) $(TOOL_LIB_SOURCES) $(BENCH_LIB_SOURCES) $(TEST_LIB_SOURCES)
all_sources = $(LIB_SOURCES) $(MAIN_SOURCES) $(MOCK_LIB_SOURCES) $(TOOL_LIB_SOURCES) $(BENCH_LIB_SOURCES) $(TEST_LIB_SOURCES) $(EXP_LIB_SOURCES)
DEPFILES = $(all_sources:.cc=.d)

# Add proper dependency support so changing a .h file forces a .cc file to
Expand Down
11 changes: 9 additions & 2 deletions src.mk
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,12 @@ MOCK_LIB_SOURCES = \
util/fault_injection_test_env.cc

BENCH_LIB_SOURCES = \
tools/db_bench_tool.cc
tools/db_bench_tool.cc \

EXP_LIB_SOURCES = \
utilities/col_buf_encoder.cc \
utilities/col_buf_decoder.cc \
utilities/column_aware_encoding_util.cc

TEST_LIB_SOURCES = \
util/testharness.cc \
Expand Down Expand Up @@ -296,6 +301,7 @@ MAIN_SOURCES = \
utilities/transactions/transaction_test.cc \
utilities/ttl/ttl_test.cc \
utilities/write_batch_with_index/write_batch_with_index_test.cc \
utilities/column_aware_encoding_test.cc \
util/iostats_context_test.cc \
util/log_write_bench.cc \
util/mock_env_test.cc \
Expand All @@ -304,7 +310,8 @@ MAIN_SOURCES = \
util/rate_limiter_test.cc \
util/slice_transform_test.cc \
util/thread_list_test.cc \
util/thread_local_test.cc
util/thread_local_test.cc \
utilities/column_aware_encoding_exp.cc

JNI_NATIVE_SOURCES = \
java/rocksjni/backupenginejni.cc \
Expand Down
4 changes: 2 additions & 2 deletions table/block_based_table_builder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -312,6 +312,8 @@ bool GoodCompressionRatio(size_t compressed_size, size_t raw_size) {
return compressed_size < raw_size - (raw_size / 8u);
}

} // namespace

// format_version is the block format as defined in include/rocksdb/table.h
Slice CompressBlock(const Slice& raw,
const CompressionOptions& compression_options,
Expand Down Expand Up @@ -391,8 +393,6 @@ Slice CompressBlock(const Slice& raw,
return raw;
}

} // namespace

// kBlockBasedTableMagicNumber was picked by running
// echo rocksdb.table.block_based | sha1sum
// and taking the leading 64 bits.
Expand Down
6 changes: 6 additions & 0 deletions table/block_based_table_builder.h
Original file line number Diff line number Diff line change
Expand Up @@ -117,4 +117,10 @@ class BlockBasedTableBuilder : public TableBuilder {
void operator=(const BlockBasedTableBuilder&) = delete;
};

Slice CompressBlock(const Slice& raw,
const CompressionOptions& compression_options,
CompressionType* type, uint32_t format_version,
const Slice& compression_dict,
std::string* compressed_output);

} // namespace rocksdb
51 changes: 51 additions & 0 deletions table/block_based_table_reader.cc
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

#include <string>
#include <utility>
#include <vector>

#include "db/dbformat.h"
#include "db/pinned_iterators_manager.h"
Expand Down Expand Up @@ -1667,6 +1668,56 @@ bool BlockBasedTable::TEST_index_reader_preloaded() const {
return rep_->index_reader != nullptr;
}

// Walks every data block of the table via the index iterator and copies each
// key/value pair into *kv_pair_blocks, producing one KVPairBlock per data
// block. The keys copied out are internal keys (user key + seq + type).
//
// Error handling:
//   - Failure to read the index block, or an index-iteration failure, is
//     propagated to the caller.
//   - An individual data block that cannot be read is skipped (best effort),
//     mirroring the behavior of DumpTable.
Status BlockBasedTable::GetKVPairsFromDataBlocks(
    std::vector<KVPairBlock>* kv_pair_blocks) {
  std::unique_ptr<InternalIterator> blockhandles_iter(
      NewIndexIterator(ReadOptions()));

  Status s = blockhandles_iter->status();
  if (!s.ok()) {
    // Cannot read Index Block
    return s;
  }

  for (blockhandles_iter->SeekToFirst(); blockhandles_iter->Valid();
       blockhandles_iter->Next()) {
    s = blockhandles_iter->status();
    if (!s.ok()) {
      // Propagate the index-iteration error. The original code `break`ed
      // here and then fell through to the unconditional Status::OK() below,
      // silently dropping the failure.
      return s;
    }

    std::unique_ptr<InternalIterator> datablock_iter;
    datablock_iter.reset(
        NewDataBlockIterator(rep_, ReadOptions(), blockhandles_iter->value()));
    s = datablock_iter->status();

    if (!s.ok()) {
      // Error reading the block - skipped (best effort)
      continue;
    }

    KVPairBlock kv_pair_block;
    for (datablock_iter->SeekToFirst(); datablock_iter->Valid();
         datablock_iter->Next()) {
      s = datablock_iter->status();
      if (!s.ok()) {
        // Error reading an entry - stop copying this block but keep what
        // has been read so far.
        break;
      }
      // Copy the key/value out of the iterator: the Slices are only valid
      // while the iterator stays positioned on this entry.
      const Slice& key = datablock_iter->key();
      const Slice& value = datablock_iter->value();
      std::string key_copy(key.data(), key.size());
      std::string value_copy(value.data(), value.size());

      kv_pair_block.push_back(
          std::make_pair(std::move(key_copy), std::move(value_copy)));
    }
    kv_pair_blocks->push_back(std::move(kv_pair_block));
  }
  return Status::OK();
}

Status BlockBasedTable::DumpTable(WritableFile* out_file) {
// Output Footer
out_file->Append(
Expand Down
9 changes: 8 additions & 1 deletion table/block_based_table_reader.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@

#include <stdint.h>
#include <memory>
#include <utility>
#include <string>
#include <utility>
#include <vector>

#include "rocksdb/options.h"
#include "rocksdb/persistent_cache.h"
Expand Down Expand Up @@ -48,6 +49,8 @@ class InternalIterator;

using std::unique_ptr;

typedef std::vector<std::pair<std::string, std::string>> KVPairBlock;

// A Table is a sorted map from strings to strings. Tables are
// immutable and persistent. A Table may be safely accessed from
// multiple threads without external synchronization.
Expand Down Expand Up @@ -138,6 +141,10 @@ class BlockBasedTable : public TableReader {
size_t cache_key_prefix_size,
const BlockHandle& handle, char* cache_key);

// Retrieve all key value pairs from data blocks in the table.
// The keys retrieved are internal keys.
Status GetKVPairsFromDataBlocks(std::vector<KVPairBlock>* kv_pair_blocks);

private:
template <class TValue>
struct CachableEntry;
Expand Down
Loading

0 comments on commit d51dc96

Please sign in to comment.