
Merge pull request #236 from darrenldl/dev
Code refactoring, coverage test suite fix
darrenldl committed Jul 6, 2019
2 parents 3afd317 + 30a387a commit a1a2bfc
Showing 6 changed files with 112 additions and 86 deletions.
1 change: 1 addition & 0 deletions cov_tests/copy.sh
@@ -7,3 +7,4 @@ cargo build

 echo "Copying blkar binary over"
 cp target/debug/blkar ./cov_tests/blkar
+cp target/debug/blkar .
17 changes: 9 additions & 8 deletions cov_tests/dev_tests.sh
@@ -85,14 +85,15 @@ while (( $i < $test_count )); do
     cd $t

     if [[ $? == 0 ]]; then
-        find . -type f \
-            -not -name "exit_code" \
-            -not -name "log" \
-            -not -name "stderr_log" \
-            -not -name "dummy_file_size" \
-            -not -name "start_time" \
-            -not -name "end_time" \
-            -delete
+        find . -maxdepth 1 \
+            -type f \
+            -not -name "exit_code" \
+            -not -name "log" \
+            -not -name "stderr_log" \
+            -not -name "dummy_file_size" \
+            -not -name "start_time" \
+            -not -name "end_time" \
+            -delete

         cd ..
     fi
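
The new -maxdepth 1 limits the cleanup to files sitting directly in the test directory; files inside subdirectories are no longer deleted. A minimal sketch of the difference (not part of this commit; the demo/ layout is made up):

mkdir -p demo/sub
touch demo/data.sbx demo/exit_code demo/sub/keep.sbx

# Without -maxdepth 1 this would also delete demo/sub/keep.sbx:
#   find demo -type f -not -name "exit_code" -delete

# With -maxdepth 1 only demo/data.sbx is deleted: demo/exit_code is kept
# by -not -name, and demo/sub/keep.sbx is never visited.
find demo -maxdepth 1 -type f -not -name "exit_code" -delete

ls -R demo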
33 changes: 32 additions & 1 deletion src/data_block_buffer/mod.rs
@@ -583,7 +583,7 @@ impl Lot {
 }

 impl DataBlockBuffer {
-    pub fn new(
+    fn new(
         version: Version,
         uid: Option<&[u8; SBX_FILE_UID_LEN]>,
         input_type: InputType,
@@ -640,6 +640,37 @@ impl DataBlockBuffer {
         }
     }

+    pub fn new_multi(
+        version: Version,
+        uid: Option<&[u8; SBX_FILE_UID_LEN]>,
+        input_type: InputType,
+        output_type: OutputType,
+        arrangement: BlockArrangement,
+        data_par_burst: Option<(usize, usize, usize)>,
+        meta_enabled: bool,
+        skip_good: bool,
+        total_buffer_count: usize,
+    ) -> Vec<Self> {
+        let mut res = Vec::with_capacity(total_buffer_count);
+
+        for i in 0..total_buffer_count {
+            res.push(Self::new(
+                version,
+                uid,
+                input_type,
+                output_type,
+                arrangement,
+                data_par_burst,
+                meta_enabled,
+                skip_good,
+                i,
+                total_buffer_count,
+            ));
+        }
+
+        res
+    }
+
     pub fn lot_count(&self) -> usize {
         self.lots.len()
     }
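
For context, the per-index loop that each call site used to contain (removed in the files below) is now folded into this constructor. The following is a compilable, simplified sketch of that pattern, not blkar's actual types: Buffer, its fields, and the channel setup are made up for illustration, and the real new_multi also takes the version, UID, block arrangement, and burst parameters shown above.

use std::sync::mpsc;
use std::thread;

struct Buffer {
    index: usize,
    total: usize,
}

impl Buffer {
    // Private constructor for a single buffer, as in the diff above.
    fn new(index: usize, total: usize) -> Self {
        Buffer { index, total }
    }

    // Builds the whole rotation in one call so call sites no longer loop
    // over indices themselves.
    pub fn new_multi(total: usize) -> Vec<Self> {
        (0..total).map(|i| Self::new(i, total)).collect()
    }
}

fn main() {
    let (to_reader, from_main) = mpsc::channel();

    // Mirrors the changed call sites below: build the buffers, then feed
    // them into the pipeline channel one by one.
    for buffer in Buffer::new_multi(4).into_iter() {
        to_reader.send(Some(buffer)).unwrap();
    }
    drop(to_reader);

    let reader = thread::spawn(move || {
        while let Ok(Some(buf)) = from_main.recv() {
            println!("got buffer {} of {}", buf.index, buf.total);
        }
    });

    reader.join().unwrap();
}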
87 changes: 42 additions & 45 deletions src/decode_core.rs
@@ -861,21 +861,20 @@ pub fn decode(
     };

     // push buffers into pipeline
-    for i in 0..PIPELINE_BUFFER_IN_ROTATION {
-        to_reader
-            .send(Some(DataBlockBuffer::new(
-                version,
-                Some(&ref_block.get_uid()),
-                InputType::Block,
-                OutputType::Data,
-                BlockArrangement::Unordered,
-                data_par_burst,
-                true,
-                skip_good,
-                i,
-                PIPELINE_BUFFER_IN_ROTATION,
-            )))
-            .unwrap();
+    let buffers = DataBlockBuffer::new_multi(
+        version,
+        Some(&ref_block.get_uid()),
+        InputType::Block,
+        OutputType::Data,
+        BlockArrangement::Unordered,
+        data_par_burst,
+        true,
+        skip_good,
+        PIPELINE_BUFFER_IN_ROTATION,
+    );
+
+    for buffer in buffers.into_iter() {
+        to_reader.send(Some(buffer)).unwrap();
     }

     reporter.start();
@@ -1127,21 +1126,20 @@ pub fn decode(
     // go through data and parity blocks

     // push buffers into pipeline
-    for i in 0..PIPELINE_BUFFER_IN_ROTATION {
-        to_reader
-            .send(Some(DataBlockBuffer::new(
-                version,
-                Some(&ref_block.get_uid()),
-                InputType::Block,
-                OutputType::Data,
-                BlockArrangement::OrderedButSomeMayBeMissing,
-                data_par_burst,
-                true,
-                false,
-                i,
-                PIPELINE_BUFFER_IN_ROTATION,
-            )))
-            .unwrap();
+    let buffers = DataBlockBuffer::new_multi(
+        version,
+        Some(&ref_block.get_uid()),
+        InputType::Block,
+        OutputType::Data,
+        BlockArrangement::OrderedButSomeMayBeMissing,
+        data_par_burst,
+        true,
+        false,
+        PIPELINE_BUFFER_IN_ROTATION,
+    );
+
+    for buffer in buffers.into_iter() {
+        to_reader.send(Some(buffer)).unwrap();
     }

     let reader_thread = {
@@ -1373,21 +1371,20 @@ pub fn decode(
     reader.seek(SeekFrom::Start(seek_to))?;

     // push buffers into pipeline
-    for i in 0..PIPELINE_BUFFER_IN_ROTATION {
-        to_reader
-            .send(Some(DataBlockBuffer::new(
-                version,
-                Some(&ref_block.get_uid()),
-                InputType::Block,
-                OutputType::Data,
-                BlockArrangement::OrderedButSomeMayBeMissing,
-                data_par_burst,
-                true,
-                false,
-                i,
-                PIPELINE_BUFFER_IN_ROTATION,
-            )))
-            .unwrap();
+    let buffers = DataBlockBuffer::new_multi(
+        version,
+        Some(&ref_block.get_uid()),
+        InputType::Block,
+        OutputType::Data,
+        BlockArrangement::OrderedButSomeMayBeMissing,
+        data_par_burst,
+        true,
+        false,
+        PIPELINE_BUFFER_IN_ROTATION,
+    );
+
+    for buffer in buffers.into_iter() {
+        to_reader.send(Some(buffer)).unwrap();
     }

     let reader_thread = {
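
The PIPELINE_BUFFER_IN_ROTATION constant and the to_reader channel in these call sites suggest a buffer-rotation pipeline: a fixed pool of buffers circulates between threads so memory use stays bounded. The sketch below is a generic illustration of that idea using plain Vec<u8> buffers and std::sync::mpsc channels, not blkar's actual pipeline code; the thread names and counts are made up.

use std::sync::mpsc;
use std::thread;

const PIPELINE_BUFFER_IN_ROTATION: usize = 4;

fn main() {
    // filler_in receives empty buffers to fill; drainer_in receives filled ones.
    let (to_filler, filler_in) = mpsc::channel::<Option<Vec<u8>>>();
    let (to_drainer, drainer_in) = mpsc::channel::<Option<Vec<u8>>>();
    let buffer_return = to_filler.clone();

    // Seed the rotation with a fixed number of empty buffers, mirroring the
    // "push buffers into pipeline" step in the diffs above.
    for _ in 0..PIPELINE_BUFFER_IN_ROTATION {
        to_filler.send(Some(Vec::with_capacity(4096))).unwrap();
    }

    let filler = thread::spawn(move || {
        let mut produced = 0;
        while let Ok(Some(mut buf)) = filler_in.recv() {
            if produced == 16 {
                // No more input: tell the drainer to shut down.
                to_drainer.send(None).unwrap();
                break;
            }
            buf.clear();
            buf.extend_from_slice(b"block");
            produced += 1;
            to_drainer.send(Some(buf)).unwrap();
        }
    });

    let drainer = thread::spawn(move || {
        while let Ok(Some(buf)) = drainer_in.recv() {
            assert_eq!(&buf[..], b"block");
            // Hand the buffer back so it can be reused.
            if buffer_return.send(Some(buf)).is_err() {
                break;
            }
        }
    });

    filler.join().unwrap();
    drainer.join().unwrap();
}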
29 changes: 14 additions & 15 deletions src/encode_core.rs
@@ -609,21 +609,20 @@ pub fn encode_file(param: &Param) -> Result<Stats, Error> {
     let worker_shutdown_barrier = Arc::new(Barrier::new(3));

     // push buffers into pipeline
-    for i in 0..PIPELINE_BUFFER_IN_ROTATION {
-        to_reader
-            .send(Some(DataBlockBuffer::new(
-                param.version,
-                Some(&param.uid),
-                InputType::Data,
-                OutputType::Block,
-                BlockArrangement::OrderedAndNoMissing,
-                param.data_par_burst,
-                param.meta_enabled,
-                false,
-                i,
-                PIPELINE_BUFFER_IN_ROTATION,
-            )))
-            .unwrap();
+    let buffers = DataBlockBuffer::new_multi(
+        param.version,
+        Some(&param.uid),
+        InputType::Data,
+        OutputType::Block,
+        BlockArrangement::OrderedAndNoMissing,
+        param.data_par_burst,
+        param.meta_enabled,
+        false,
+        PIPELINE_BUFFER_IN_ROTATION,
+    );
+
+    for buffer in buffers.into_iter() {
+        to_reader.send(Some(buffer)).unwrap();
     }

     reporter.start();
31 changes: 14 additions & 17 deletions src/sbx_container_content.rs
@@ -43,8 +43,6 @@ pub fn hash(
         },
     )?;

-    // let mut block = Block::dummy();
-
     let reporter = Arc::new(ProgressReporter::new(
         &stats,
         "Stored data hashing progress",
@@ -63,21 +61,20 @@
     let worker_shutdown_barrier = Arc::new(Barrier::new(2));

     // push buffers into pipeline
-    for i in 0..PIPELINE_BUFFER_IN_ROTATION {
-        to_reader
-            .send(Some(DataBlockBuffer::new(
-                version,
-                None,
-                InputType::Block,
-                OutputType::Disabled,
-                BlockArrangement::OrderedButSomeMayBeMissing,
-                data_par_burst,
-                true,
-                false,
-                i,
-                PIPELINE_BUFFER_IN_ROTATION,
-            )))
-            .unwrap();
+    let buffers = DataBlockBuffer::new_multi(
+        version,
+        None,
+        InputType::Block,
+        OutputType::Disabled,
+        BlockArrangement::OrderedButSomeMayBeMissing,
+        data_par_burst,
+        true,
+        false,
+        PIPELINE_BUFFER_IN_ROTATION,
+    );
+
+    for buffer in buffers.into_iter() {
+        to_reader.send(Some(buffer)).unwrap();
     }

     reporter.start();
