Skip to content

Commit

Permalink
Merge 45ba3d9 into 7176d1b
Browse files Browse the repository at this point in the history
  • Loading branch information
wcampbell0x2a committed Dec 27, 2023
2 parents 7176d1b + 45ba3d9 commit 2bdf2be
Show file tree
Hide file tree
Showing 13 changed files with 273 additions and 3 deletions.
18 changes: 18 additions & 0 deletions deku-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,9 @@ struct FieldData {
/// a predicate to decide when to stop reading elements into the container
until: Option<TokenStream>,

/// read until `reader.end()`
read_all: bool,

/// apply a function to the field after it's read
map: Option<TokenStream>,

Expand Down Expand Up @@ -454,6 +457,7 @@ impl FieldData {
bits_read: receiver.bits_read?,
bytes_read: receiver.bytes_read?,
until: receiver.until?,
read_all: receiver.read_all,
map: receiver.map?,
ctx,
update: receiver.update?,
Expand Down Expand Up @@ -521,6 +525,16 @@ impl FieldData {
));
}

// Validate usage of read_all
if data.read_all
&& (data.count.is_some() || (data.bits_read.is_some() || data.bytes_read.is_some()))
{
return Err(cerror(
data.bits.span(),
"conflicting: `read_all` cannot be used with `count`, `bits_read`, or `bytes_read`",
));
}

Ok(())
}

Expand Down Expand Up @@ -759,6 +773,10 @@ struct DekuFieldReceiver {
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
until: Result<Option<TokenStream>, ReplacementError>,

/// read until `reader.end()`
#[darling(default)]
read_all: bool,

/// apply a function to the field after it's read
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
map: Result<Option<TokenStream>, ReplacementError>,
Expand Down
11 changes: 11 additions & 0 deletions deku-derive/src/macros/deku_read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -683,6 +683,17 @@ fn emit_field_read(
(::#crate_::ctx::Limit::new_until(#field_until), (#read_args))
)?
}
} else if f.read_all {
quote! {
{
use core::borrow::Borrow;
#type_as_deku_read::from_reader_with_ctx
(
__deku_reader,
(::#crate_::ctx::Limit::end(), (#read_args))
)?
}
}
} else {
quote! {
#type_as_deku_read::from_reader_with_ctx
Expand Down
23 changes: 23 additions & 0 deletions examples/read_all.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
use deku::prelude::*;
use std::convert::{TryFrom, TryInto};

/// Demonstrates the `read_all` attribute: keep reading elements into the
/// annotated container until the reader reports end-of-input.
fn main() {
    #[derive(PartialEq, Debug, DekuRead, DekuWrite)]
    struct TestStruct {
        // Consume `(u8, u8)` pairs until no input remains.
        #[deku(read_all)]
        data: Vec<(u8, u8)>,
    }

    let test_data: Vec<u8> = vec![0xaa, 0xbb, 0xcc, 0xdd];

    // Reading: four bytes become two (u8, u8) pairs.
    let parsed = TestStruct::try_from(test_data.as_slice()).unwrap();
    let expected = TestStruct {
        data: vec![(0xaa, 0xbb), (0xcc, 0xdd)],
    };
    assert_eq!(expected, parsed);

    // Writing: round-trips back to the original bytes.
    let written: Vec<u8> = parsed.try_into().unwrap();
    assert_eq!(test_data, written);
}
40 changes: 39 additions & 1 deletion src/attributes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ enum DekuEnum {
| [bits_read](#bits_read) | field | Set the field representing the number of bits to read into a container
| [bytes_read](#bytes_read) | field | Set the field representing the number of bytes to read into a container
| [until](#until) | field | Set a predicate returning when to stop reading elements into a container
| [read_all](#read_all) | field | Read until [reader.end()] returns `true`
| [update](#update) | field | Apply code over the field when `.update()` is called
| [temp](#temp) | field | Read the field but exclude it from the struct/enum
| [temp_value](#temp_value) | field | Write the field but exclude it from the struct/enum
Expand Down Expand Up @@ -412,6 +413,43 @@ assert_eq!(
},
value
);
```
# read_all
Read values into the container until [reader.end()] returns `true`.
Example:
```rust
# use deku::prelude::*;
# use std::convert::{TryInto, TryFrom};
# #[derive(Debug, PartialEq, DekuRead, DekuWrite)]
struct InnerDekuTest {
field_a: u8,
field_b: u8
}
# #[derive(Debug, PartialEq, DekuRead, DekuWrite)]
struct DekuTest {
#[deku(read_all)]
items: Vec<InnerDekuTest>,
}
let data: &[u8] = &[0xAB, 0xBC, 0xDE, 0xEF];
let value = DekuTest::try_from(data).unwrap();
assert_eq!(
DekuTest {
items: vec![
InnerDekuTest{field_a: 0xAB, field_b: 0xBC},
InnerDekuTest{field_a: 0xDE, field_b: 0xEF}],
},
value
);
let value: Vec<u8> = value.try_into().unwrap();
assert_eq!(&*data, value);
```
Expand Down Expand Up @@ -1201,5 +1239,5 @@ let value: Vec<u8> = value.try_into().unwrap();
assert_eq!(data, value);
```
[reader.end()]: crate::reader::Reader::end()
*/
11 changes: 11 additions & 0 deletions src/ctx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,9 @@ pub enum Limit<T, Predicate: FnMut(&T) -> bool> {

/// Read until a given quantity of bits have been read
BitSize(BitSize),

/// Read until `reader.end()` is true. Used for `read_all` attribute.
End,
}

impl<T> From<usize> for Limit<T, fn(&T) -> bool> {
Expand Down Expand Up @@ -135,6 +138,14 @@ impl<T, Predicate: for<'a> FnMut(&'a T) -> bool> Limit<T, Predicate> {
}
}

impl<T> Limit<T, fn(&T) -> bool> {
    /// Construct a [`Limit::End`]: read elements until `reader.end()` reports
    /// that no more input is available. This is the limit used by the
    /// `read_all` field attribute.
    #[inline]
    pub fn end() -> Self {
        Self::End
    }
}

impl<T> Limit<T, fn(&T) -> bool> {
/// Constructs a new Limit that reads until the given number of elements are read
#[inline]
Expand Down
28 changes: 28 additions & 0 deletions src/impls/hashmap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,30 @@ where
Ok(res)
}

/// Read `(K, V)` pairs from `reader` into a `HashMap` until the reader
/// reports end-of-input (`Limit::End` / the `read_all` attribute).
///
/// `capacity` is an optional pre-allocation hint; `ctx` is forwarded to each
/// element read.
fn from_reader_with_ctx_hashmap_to_end<'a, K, V, S, Ctx, R: Read>(
    reader: &mut crate::reader::Reader<R>,
    capacity: Option<usize>,
    ctx: Ctx,
) -> Result<HashMap<K, V, S>, DekuError>
where
    K: DekuReader<'a, Ctx> + Eq + Hash,
    V: DekuReader<'a, Ctx>,
    S: BuildHasher + Default,
    Ctx: Copy,
{
    // Pre-size the map when a capacity hint was supplied.
    let mut map = HashMap::with_capacity_and_hasher(capacity.unwrap_or(0), S::default());

    // Keep pulling key/value pairs until the reader is exhausted.
    while !reader.end() {
        let (key, value) = <(K, V)>::from_reader_with_ctx(reader, ctx)?;
        map.insert(key, value);
    }

    Ok(map)
}

impl<'a, K, V, S, Ctx, Predicate> DekuReader<'a, (Limit<(K, V), Predicate>, Ctx)>
for HashMap<K, V, S>
where
Expand Down Expand Up @@ -136,6 +160,9 @@ where
move |read_bits, _| read_bits == bit_size,
)
}

// Read until `reader.end()` is true
Limit::End => from_reader_with_ctx_hashmap_to_end(reader, None, inner_ctx),
}
}
}
Expand Down Expand Up @@ -218,6 +245,7 @@ mod tests {
case::count_2([0x01, 0xAA, 0x02, 0xBB, 0xBB].as_ref(), Endian::Little, Some(8), 2.into(), fxhashmap!{0x01 => 0xAA, 0x02 => 0xBB}, bits![u8, Msb0;], &[0xbb]),
case::until_null([0x01, 0xAA, 0, 0, 0xBB].as_ref(), Endian::Little, None, (|kv: &(u8, u8)| kv.0 == 0u8 && kv.1 == 0u8).into(), fxhashmap!{0x01 => 0xAA, 0 => 0}, bits![u8, Msb0;], &[0xbb]),
case::until_bits([0x01, 0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(16).into(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0;], &[0xbb]),
case::read_all([0x01, 0xAA].as_ref(), Endian::Little, None, Limit::end(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0;], &[]),
case::bits_6([0b0000_0100, 0b1111_0000, 0b1000_0000].as_ref(), Endian::Little, Some(6), 2.into(), fxhashmap!{0x01 => 0x0F, 0x02 => 0}, bits![u8, Msb0;], &[]),
#[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")]
case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;], &[]),
Expand Down
27 changes: 27 additions & 0 deletions src/impls/hashset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,29 @@ where
Ok(res)
}

/// Read `T` values from `reader` into a `HashSet` until the reader reports
/// end-of-input (`Limit::End` / the `read_all` attribute).
///
/// `capacity` is an optional pre-allocation hint; `ctx` is forwarded to each
/// element read.
fn from_reader_with_ctx_hashset_to_end<'a, T, S, Ctx, R: Read>(
    reader: &mut crate::reader::Reader<R>,
    capacity: Option<usize>,
    ctx: Ctx,
) -> Result<HashSet<T, S>, DekuError>
where
    T: DekuReader<'a, Ctx> + Eq + Hash,
    S: BuildHasher + Default,
    Ctx: Copy,
{
    // Pre-size the set when a capacity hint was supplied.
    let mut set = HashSet::with_capacity_and_hasher(capacity.unwrap_or(0), S::default());

    // Keep inserting elements until the reader is exhausted.
    while !reader.end() {
        set.insert(<T>::from_reader_with_ctx(reader, ctx)?);
    }

    Ok(set)
}

impl<'a, T, S, Ctx, Predicate> DekuReader<'a, (Limit<T, Predicate>, Ctx)> for HashSet<T, S>
where
T: DekuReader<'a, Ctx> + Eq + Hash,
Expand Down Expand Up @@ -131,6 +154,9 @@ where
move |read_bits, _| read_bits == bit_size,
)
}

// Read until `reader.end()` is true
Limit::End => from_reader_with_ctx_hashset_to_end(reader, None, inner_ctx),
}
}
}
Expand Down Expand Up @@ -190,6 +216,7 @@ mod tests {
case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB].into_iter().collect(), bits![u8, Msb0;], &[0xcc]),
case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0].into_iter().collect(), bits![u8, Msb0;], &[0xbb]),
case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA].into_iter().collect(), bits![u8, Msb0;], &[0xbb]),
case::read_all([0xAA, 0xBB].as_ref(), Endian::Little, None, Limit::end(), vec![0xAA, 0xBB].into_iter().collect(), bits![u8, Msb0;], &[]),
case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110].into_iter().collect(), bits![u8, Msb0; 1, 0, 0, 1], &[]),
#[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")]
case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;], &[]),
Expand Down
25 changes: 25 additions & 0 deletions src/impls/vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,27 @@ where
Ok(res)
}

/// Read `T` values from `reader` into a `Vec` until the reader reports
/// end-of-input (`Limit::End` / the `read_all` attribute).
///
/// `capacity` is an optional pre-allocation hint; `ctx` is forwarded to each
/// element read.
fn reader_vec_to_end<'a, T, Ctx, R: Read>(
    reader: &mut crate::reader::Reader<R>,
    capacity: Option<usize>,
    ctx: Ctx,
) -> Result<Vec<T>, DekuError>
where
    T: DekuReader<'a, Ctx>,
    Ctx: Copy,
{
    // Pre-size the vector when a capacity hint was supplied.
    let mut items = capacity.map_or_else(Vec::new, Vec::with_capacity);

    // Keep appending elements until the reader is exhausted.
    while !reader.end() {
        items.push(<T>::from_reader_with_ctx(reader, ctx)?);
    }

    Ok(items)
}

impl<'a, T, Ctx, Predicate> DekuReader<'a, (Limit<T, Predicate>, Ctx)> for Vec<T>
where
T: DekuReader<'a, Ctx>,
Expand Down Expand Up @@ -104,6 +125,8 @@ where
read_bits == bit_size
})
}

Limit::End => reader_vec_to_end(reader, None, inner_ctx),
}
}
}
Expand Down Expand Up @@ -157,6 +180,8 @@ mod tests {
case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB], bits![u8, Msb0;], &[0xcc]),
case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0], bits![u8, Msb0;], &[0xbb]),
case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA], bits![u8, Msb0;], &[0xbb]),
case::end([0xAA, 0xBB].as_ref(), Endian::Little, None, Limit::end(), vec![0xaa, 0xbb], bits![u8, Msb0;], &[]),
case::end_bitsize([0xf0, 0xf0].as_ref(), Endian::Little, Some(4), Limit::end(), vec![0xf, 0x0, 0x0f, 0x0], bits![u8, Msb0;], &[]),
case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110], bits![u8, Msb0; 1, 0, 0, 1], &[]),
#[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")]
case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;], &[]),
Expand Down
5 changes: 3 additions & 2 deletions src/reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ impl<'a, R: Read> Reader<'a, R> {
self.leftover.iter().by_vals().collect()
}

/// Return true if we are at the end of a reader and there are no cached bits in the reader
/// Return true if we are at the end of a reader and there are no cached bits in the reader.
/// Since this uses [Read] internally, this will return true when [Read] returns [ErrorKind::UnexpectedEof].
///
/// The byte that was read will be internally buffered
#[inline]
Expand Down Expand Up @@ -106,7 +107,7 @@ impl<'a, R: Read> Reader<'a, R> {
}

/// Used at the beginning of `from_reader`.
/// TODO: maybe send into read_bytes() if amt >= 8
// TODO: maybe send into read_bytes() if amt >= 8
#[inline]
pub fn skip_bits(&mut self, amt: usize) -> Result<(), DekuError> {
#[cfg(feature = "logging")]
Expand Down
1 change: 1 addition & 0 deletions tests/test_attributes/test_limits/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
mod test_bits_read;
mod test_bytes_read;
mod test_count;
mod test_read_all;
mod test_until;
46 changes: 46 additions & 0 deletions tests/test_attributes/test_limits/test_read_all.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
use std::convert::{TryFrom, TryInto};

use deku::prelude::*;

#[test]
fn test_read_all() {
    // Case 1: `read_all` as the only field — the entire input is consumed.
    #[derive(PartialEq, Debug, DekuRead, DekuWrite)]
    struct TestStruct1 {
        #[deku(read_all)]
        data: Vec<u8>,
    }

    let test_data: Vec<u8> = vec![0xaa, 0xbb];

    let parsed = TestStruct1::try_from(test_data.as_slice()).unwrap();
    assert_eq!(
        TestStruct1 {
            data: test_data.to_vec()
        },
        parsed
    );

    // Round-trip: writing reproduces the original bytes.
    let written: Vec<u8> = parsed.try_into().unwrap();
    assert_eq!(test_data, written);

    // Case 2: `read_all` after a fixed field — only the remainder is consumed.
    #[derive(PartialEq, Debug, DekuRead, DekuWrite)]
    struct TestStruct2 {
        first: u8,
        #[deku(read_all)]
        data: Vec<u8>,
    }

    let test_data: Vec<u8> = vec![0xff, 0xaa, 0xbb];

    let parsed = TestStruct2::try_from(test_data.as_slice()).unwrap();
    assert_eq!(
        TestStruct2 {
            first: 0xff,
            data: test_data[1..].to_vec()
        },
        parsed
    );

    // Round-trip: writing reproduces the original bytes.
    let written: Vec<u8> = parsed.try_into().unwrap();
    assert_eq!(test_data, written);
}
Loading

0 comments on commit 2bdf2be

Please sign in to comment.