Merge 09a07d8 into c300f40
wcampbell0x2a committed Nov 3, 2023
2 parents c300f40 + 09a07d8 commit 27866fa
Showing 29 changed files with 1,213 additions and 272 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
@@ -40,6 +40,7 @@ trybuild = "1.0.77"
rustc-hash = "1.1.0"
env_logger = "0.10.0"
assert_hex = "0.2.2"
log = "0.4.17"

[[bench]]
name = "deku"
18 changes: 18 additions & 0 deletions deku-derive/src/lib.rs
@@ -136,6 +136,9 @@ struct DekuData {

/// enum only: byte size of the enum `id`
bytes: Option<Num>,

/// Bit Order for all fields
bit_order: Option<syn::LitStr>,
}

impl DekuData {
@@ -184,13 +187,15 @@ impl DekuData {
id_type: receiver.id_type?,
bits: receiver.bits,
bytes: receiver.bytes,
bit_order: receiver.bit_order,
};

DekuData::validate(&data)?;

Ok(data)
}

// TODO: Add validation that #[bit_order] requires #[bytes]
fn validate(data: &DekuData) -> Result<(), TokenStream> {
// Validate `ctx_default`
if data.ctx_default.is_some() && data.ctx.is_none() {
@@ -315,6 +320,7 @@ impl<'a> TryFrom<&'a DekuData> for DekuDataEnum<'a> {
deku_data.endian.as_ref(),
deku_data.bits.as_ref(),
deku_data.bytes.as_ref(),
deku_data.bit_order.as_ref(),
)?;

Ok(Self {
@@ -434,6 +440,9 @@ struct FieldData {

// assert value of field
assert_eq: Option<TokenStream>,

/// Bit Order of field
bit_order: Option<syn::LitStr>,
}

impl FieldData {
@@ -470,6 +479,7 @@ impl FieldData {
cond: receiver.cond?,
assert: receiver.assert?,
assert_eq: receiver.assert_eq?,
bit_order: receiver.bit_order,
};

FieldData::validate(&data)?;
@@ -649,6 +659,10 @@ struct DekuReceiver {
/// enum only: byte size of the enum `id`
#[darling(default)]
bytes: Option<Num>,

/// Bit order for all fields
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

type ReplacementError = TokenStream;
@@ -825,6 +839,10 @@ struct DekuFieldReceiver {
// assert value of field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
assert_eq: Result<Option<TokenStream>, ReplacementError>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

/// Receiver for the variant-level attributes inside an enum
8 changes: 6 additions & 2 deletions deku-derive/src/macros/deku_read.rs
@@ -517,9 +517,11 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream {
if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = vec![0; bytes_read];
let _ = __deku_reader.read_bytes(bytes_read, &mut buf)?;
// TODO: use skip_bytes, or Seek in the future?
let _ = __deku_reader.read_bytes(bytes_read, &mut buf, ::#crate_::ctx::Order::Msb0)?;
} else {
let _ = __deku_reader.read_bits(__deku_pad)?;
// TODO: use skip_bits, or Seek in the future?
let _ = __deku_reader.read_bits(__deku_pad, ::#crate_::ctx::Order::Msb0)?;
}
}
}
@@ -536,6 +538,7 @@ fn emit_field_read(
let field_type = &f.ty;

let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

let field_reader = &f.reader;

@@ -617,6 +620,7 @@
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

// The __deku_reader limiting options are special; we need to generate `(limit, (other, ..))` for them.
2 changes: 2 additions & 0 deletions deku-derive/src/macros/deku_write.rs
@@ -418,6 +418,7 @@ fn emit_field_write(
) -> Result<TokenStream, syn::Error> {
let crate_ = super::get_crate_name();
let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

// fields to check usage of bit/byte offset
let field_check_vars = [
@@ -483,6 +484,7 @@
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

if f.temp {
46 changes: 38 additions & 8 deletions deku-derive/src/macros/mod.rs
@@ -238,17 +238,24 @@ pub(crate) fn gen_id_args(
endian: Option<&syn::LitStr>,
bits: Option<&Num>,
bytes: Option<&Num>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let id_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
let id_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

match &id_args[..] {
[arg] => Ok(quote! {#arg}),
@@ -265,18 +272,27 @@ fn gen_field_args(
bits: Option<&Num>,
bytes: Option<&Num>,
ctx: Option<&Punctuated<syn::Expr, syn::token::Comma>>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;
let ctx = ctx.map(|c| quote! {#c});

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let field_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref(), ctx.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
// TODO: the order here should be documented
let field_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
ctx.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

// Because multiple ctx args are `impl DekuRead<'_, (T1, T2)>`, but a single arg is `impl DekuRead<'_, T1>` (not a tuple)
match &field_args[..] {
@@ -299,6 +315,20 @@ fn gen_endian_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
}
}

/// Generate bit_order tokens from string: `lsb` -> `Order::Lsb0`.
fn gen_bit_order_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
match s.value().as_str() {
"lsb" => Ok(quote! {::#crate_::ctx::Order::Lsb0}),
"msb" => Ok(quote! {::#crate_::ctx::Order::Msb0}),
_ => {
// treat as variable, possibly from `ctx`
let v: TokenStream = s.value().parse()?;
Ok(quote! {#v})
}
}
}

/// Wraps a TokenStream with a closure providing access to `ctx` variables when
/// `ctx_default` is provided
fn wrap_default_ctx(
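To make the `field_args` ordering above concrete (until the `TODO` about documenting it is resolved): a field such as `#[deku(endian = "big", bits = "4", bit_order = "lsb")]` yields ctx arguments in the array's order. A hedged sketch of the expansion, not actual generated output:

```rust
// Illustrative only — the ctx arguments passed to the field's DekuRead impl,
// in the array order (endian, bits, bytes, bit_order, ctx):
//
//   #[deku(endian = "big", bits = "4", bit_order = "lsb")]
//     -> (::deku::ctx::Endian::Big, ::deku::ctx::BitSize(4), ::deku::ctx::Order::Lsb0)
```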
1 change: 0 additions & 1 deletion ensure_no_std/Cargo.toml
@@ -22,4 +22,3 @@ alloc = []
cortex-m-rt = "0.7.3"
deku = { path = "../", default-features = false, features = ["alloc"] }
embedded-alloc = "0.5.0"

1 change: 0 additions & 1 deletion examples/custom_reader_and_writer.rs
@@ -1,6 +1,5 @@
use std::convert::TryInto;

use deku::bitvec::{BitVec, Msb0};
use deku::ctx::BitSize;
use deku::writer::Writer;
use deku::{prelude::*, DekuWriter};
73 changes: 73 additions & 0 deletions examples/ieee.rs
@@ -0,0 +1,73 @@
use deku::ctx::Order;
use deku::prelude::*;

use std::convert::TryFrom;

#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(type = "u8", bits = "2")]
#[deku(bit_order = "ctx_lsb", ctx = "ctx_lsb: Order")]
pub enum FrameType {
#[deku(id = "0")]
Management,
#[deku(id = "1")]
Control,
#[deku(id = "2")]
Data,
}

#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "ctx_lsb", ctx = "ctx_lsb: Order")]
pub struct Flags {
#[deku(bits = 1)]
pub to_ds: u8,
#[deku(bits = 1)]
pub from_ds: u8,
#[deku(bits = 1)]
pub more_fragments: u8,
#[deku(bits = 1)]
pub retry: u8,
#[deku(bits = 1)]
pub power_management: u8,
#[deku(bits = 1)]
pub more_data: u8,
#[deku(bits = 1)]
pub protected_frame: u8,
#[deku(bits = 1)]
pub order: u8,
}

#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "lsb")]
pub struct FrameControl {
#[deku(bits = 4)]
pub sub_type: u8,
#[deku(bits = 2)]
pub protocol_version: u8,
pub frame_type: FrameType,

pub flags: Flags,
}

fn main() {
let data = vec![0x88u8, 0x41];
let control_frame = FrameControl::try_from(data.as_ref()).unwrap();
assert_eq!(
control_frame,
FrameControl {
protocol_version: 0,
frame_type: FrameType::Data,
sub_type: 8,

flags: Flags {
to_ds: 1,
from_ds: 0,
more_fragments: 0,
retry: 0,
power_management: 0,
more_data: 0,
protected_frame: 1,
order: 0,
}
}
);
}
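As an editorial walkthrough of why `[0x88, 0x41]` decodes this way (not part of the example file): with `bit_order = "lsb"`, fields are consumed from each byte starting at the least significant bit.

```rust
// Byte 0 = 0x88 = 0b1000_1000, consumed lsb-first:
//   bits 0..=3 = 0b1000 = 8 -> sub_type
//   bits 4..=5 = 0b00   = 0 -> protocol_version
//   bits 6..=7 = 0b10   = 2 -> frame_type (FrameType::Data)
// Byte 1 = 0x41 = 0b0100_0001:
//   bit 0      = 1          -> to_ds
//   bits 1..=5 = 0          -> from_ds, more_fragments, retry,
//                              power_management, more_data
//   bit 6      = 1          -> protected_frame
//   bit 7      = 0          -> order
```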
87 changes: 87 additions & 0 deletions src/attributes.rs
@@ -33,6 +33,7 @@ enum DekuEnum {
| Attribute | Scope | Description
|-----------|------------------|------------
| [endian](#endian) | top-level, field | Set the endianness
| [bit_order](#bit_order) | top-level, field | Set the order in which bits are read
| [magic](#magic) | top-level | A magic value that must be present at the start of this struct/enum
| [assert](#assert) | field | Assert a condition
| [assert_eq](#assert_eq) | field | Assert equals on the field
@@ -141,6 +142,92 @@ assert_eq!(
let value: Vec<u8> = value.try_into().unwrap();
assert_eq!(&*data, value);
```
# bit_order
Specify the bit order of a field or container. By default, all bits are read in `Msb0` (most significant bit first) order.
### Top-Level Example
```rust
# use deku::prelude::*;
# use std::convert::{TryInto, TryFrom};
# #[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "lsb")]
pub struct SquashfsV3 {
#[deku(bits = "4")]
inode_type: u32,
#[deku(bits = "12")]
mode: u32,
#[deku(bits = "8")]
uid: u32,
#[deku(bits = "8")]
guid: u32,
mtime: u32,
inode_number: u32,
}
let data: &[u8] = &[
0x31, 0x12, 0x04, 0x05, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
];
let header = SquashfsV3::try_from(data).unwrap();
assert_eq!(
SquashfsV3 {
inode_type: 0x01,
mode: 0x123,
uid: 0x4,
guid: 0x5,
mtime: 0x6,
inode_number: 0x7
},
header,
);
```
With endianness:
```rust
# use deku::prelude::*;
# use std::convert::{TryInto, TryFrom};
# #[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(endian = "big", bit_order = "lsb")]
pub struct BigEndian {
#[deku(bits = "13")]
offset: u16,
#[deku(bits = "3")]
t: u8,
}
let data = vec![0x40, 0x40];
let big_endian = BigEndian::try_from(data.as_ref()).unwrap();
assert_eq!(
big_endian,
BigEndian {
offset: 0x4000,
t: 2
}
);
let bytes = big_endian.to_bytes().unwrap();
assert_eq!(bytes, data);
```
### Field Example
```rust
# use deku::prelude::*;
# use std::convert::{TryInto, TryFrom};
# #[derive(Debug, DekuRead, DekuWrite, PartialEq)]
pub struct LsbField {
#[deku(bit_order = "lsb", bits = "13")]
offset: u16,
#[deku(bit_order = "lsb", bits = "3")]
t: u8,
}
let data = vec![0x40, 0x40];
let more_first = LsbField::try_from(data.as_ref()).unwrap();
assert_eq!(more_first, LsbField { offset: 0x40, t: 2 });
let bytes = more_first.to_bytes().unwrap();
assert_eq!(bytes, data);
```
# magic
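One behavior worth noting from the derive changes above: a field-level `bit_order` overrides the top-level attribute, per the `f.bit_order.as_ref().or(input.bit_order.as_ref())` fallback chain in `deku_read.rs` and `deku_write.rs`. A minimal sketch of the precedence, assuming mixing orders within one container is supported (the struct here is hypothetical):

```rust
use deku::prelude::*;

// Hypothetical: the container defaults to lsb ordering; `kind` opts into
// msb via a field-level attribute, which takes precedence.
#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "lsb")]
struct Mixed {
    #[deku(bits = 13)]
    offset: u16, // lsb-first, from the container attribute
    #[deku(bit_order = "msb", bits = 3)]
    kind: u8, // msb-first, the field attribute wins
}
```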