Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: Impl reader bit order #367

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ alloc_counter = "0.0.4"
trybuild = "1.0.77"
rustc-hash = "1.1.0"
env_logger = "0.10.0"
log = "0.4.17"

[[bench]]
name = "deku"
Expand Down
18 changes: 18 additions & 0 deletions deku-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,9 @@ struct DekuData {

/// enum only: byte size of the enum `id`
bytes: Option<Num>,

/// Bit Order for all fields
bit_order: Option<syn::LitStr>,
}

impl DekuData {
Expand Down Expand Up @@ -184,13 +187,15 @@ impl DekuData {
id_type: receiver.id_type?,
bits: receiver.bits,
bytes: receiver.bytes,
bit_order: receiver.bit_order,
};

DekuData::validate(&data)?;

Ok(data)
}

// TODO: validate that `#[deku(bit_order)]` is only used together with `#[deku(bytes)]`
fn validate(data: &DekuData) -> Result<(), TokenStream> {
// Validate `ctx_default`
if data.ctx_default.is_some() && data.ctx.is_none() {
Expand Down Expand Up @@ -315,6 +320,7 @@ impl<'a> TryFrom<&'a DekuData> for DekuDataEnum<'a> {
deku_data.endian.as_ref(),
deku_data.bits.as_ref(),
deku_data.bytes.as_ref(),
deku_data.bit_order.as_ref(),
)?;

Ok(Self {
Expand Down Expand Up @@ -434,6 +440,9 @@ struct FieldData {

// assert value of field
assert_eq: Option<TokenStream>,

/// Bit Order of field
bit_order: Option<syn::LitStr>,
}

impl FieldData {
Expand Down Expand Up @@ -470,6 +479,7 @@ impl FieldData {
cond: receiver.cond?,
assert: receiver.assert?,
assert_eq: receiver.assert_eq?,
bit_order: receiver.bit_order,
};

FieldData::validate(&data)?;
Expand Down Expand Up @@ -649,6 +659,10 @@ struct DekuReceiver {
/// enum only: byte size of the enum `id`
#[darling(default)]
bytes: Option<Num>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

type ReplacementError = TokenStream;
Expand Down Expand Up @@ -825,6 +839,10 @@ struct DekuFieldReceiver {
// assert value of field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
assert_eq: Result<Option<TokenStream>, ReplacementError>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

/// Receiver for the variant-level attributes inside a enum
Expand Down
8 changes: 6 additions & 2 deletions deku-derive/src/macros/deku_read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -517,9 +517,11 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream {
if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = vec![0; bytes_read];
let _ = __deku_reader.read_bytes(bytes_read, &mut buf)?;
// TODO: use skip_bytes, or Seek in the future?
let _ = __deku_reader.read_bytes(bytes_read, &mut buf, ::#crate_::ctx::Order::Msb0)?;
} else {
let _ = __deku_reader.read_bits(__deku_pad)?;
// TODO: use skip_bits, or Seek in the future?
let _ = __deku_reader.read_bits(__deku_pad, ::#crate_::ctx::Order::Msb0)?;
}
}
}
Expand All @@ -536,6 +538,7 @@ fn emit_field_read(
let field_type = &f.ty;

let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

let field_reader = &f.reader;

Expand Down Expand Up @@ -617,6 +620,7 @@ fn emit_field_read(
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

// The __deku_reader limiting options are special, we need to generate `(limit, (other, ..))` for them.
Expand Down
2 changes: 2 additions & 0 deletions deku-derive/src/macros/deku_write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -473,6 +473,7 @@ fn emit_field_write(
) -> Result<TokenStream, syn::Error> {
let crate_ = super::get_crate_name();
let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

// fields to check usage of bit/byte offset
let field_check_vars = [
Expand Down Expand Up @@ -530,6 +531,7 @@ fn emit_field_write(
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

if f.temp {
Expand Down
46 changes: 38 additions & 8 deletions deku-derive/src/macros/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -238,17 +238,24 @@ pub(crate) fn gen_id_args(
endian: Option<&syn::LitStr>,
bits: Option<&Num>,
bytes: Option<&Num>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let id_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
let id_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

match &id_args[..] {
[arg] => Ok(quote! {#arg}),
Expand All @@ -265,18 +272,27 @@ fn gen_field_args(
bits: Option<&Num>,
bytes: Option<&Num>,
ctx: Option<&Punctuated<syn::Expr, syn::token::Comma>>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;
let ctx = ctx.map(|c| quote! {#c});

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let field_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref(), ctx.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
// TODO: document the resulting ctx argument order (endian, bits, bytes, bit_order, ctx) as part of the public contract
let field_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
ctx.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

// Because `impl DekuRead<'_, (T1, T2)>` but `impl DekuRead<'_, T1>`(not tuple)
match &field_args[..] {
Expand All @@ -299,6 +315,20 @@ fn gen_endian_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
}
}

/// Generate bit_order tokens from string: `lsb` -> `Order::Lsb0`.
fn gen_bit_order_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
    let crate_ = get_crate_name();
    let value = s.value();
    match value.as_str() {
        "msb" => Ok(quote! {::#crate_::ctx::Order::Msb0}),
        "lsb" => Ok(quote! {::#crate_::ctx::Order::Lsb0}),
        other => {
            // Not one of the known literals: treat the string as an
            // expression/variable name, possibly supplied via `ctx`.
            let expr: TokenStream = other.parse()?;
            Ok(quote! {#expr})
        }
    }
}

/// Wraps a TokenStream with a closure providing access to `ctx` variables when
/// `ctx_default` is provided
fn wrap_default_ctx(
Expand Down
1 change: 0 additions & 1 deletion ensure_no_std/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,3 @@ alloc = []
cortex-m-rt = "0.7.3"
deku = { path = "../", default-features = false, features = ["alloc"] }
embedded-alloc = "0.5.0"

73 changes: 73 additions & 0 deletions examples/ieee.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
use deku::ctx::Order;
use deku::prelude::*;
use hexlit::hex;
use std::convert::TryFrom;

/// The 2-bit frame `type` field.
///
/// The bit order is not fixed here: it is passed in by the parent through
/// ctx (`ctx_lsb`), so the enclosing struct decides Msb0 vs Lsb0.
#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(type = "u8", bits = "2")]
#[deku(bit_order = "ctx_lsb", ctx = "ctx_lsb: Order")]
pub enum FrameType {
    /// Management frame (`type == 0`)
    #[deku(id = "0")]
    Management,
    /// Control frame (`type == 1`)
    #[deku(id = "1")]
    Control,
    /// Data frame (`type == 2`)
    #[deku(id = "2")]
    Data,
}

/// Flag bits of the frame-control word, one bit per field, in the order
/// they appear in this declaration.
///
/// The bit order is supplied by the parent through ctx (`ctx_lsb`);
/// `FrameControl` declares `bit_order = "lsb"` at its struct level.
#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "ctx_lsb", ctx = "ctx_lsb: Order")]
pub struct Flags {
    /// "To DS" flag (1 bit)
    #[deku(bits = 1)]
    pub to_ds: u8,
    /// "From DS" flag (1 bit)
    #[deku(bits = 1)]
    pub from_ds: u8,
    /// "More Fragments" flag (1 bit)
    #[deku(bits = 1)]
    pub more_fragments: u8,
    /// "Retry" flag (1 bit)
    #[deku(bits = 1)]
    pub retry: u8,
    /// "Power Management" flag (1 bit)
    #[deku(bits = 1)]
    pub power_management: u8,
    /// "More Data" flag (1 bit)
    #[deku(bits = 1)]
    pub more_data: u8,
    /// "Protected Frame" flag (1 bit)
    #[deku(bits = 1)]
    pub protected_frame: u8,
    /// "Order" flag (1 bit)
    #[deku(bits = 1)]
    pub order: u8,
}

/// The frame-control word, parsed least-significant-bit first
/// (`bit_order = "lsb"`).
///
/// NOTE(review): `FrameType` and `Flags` each take an `Order` ctx value;
/// this relies on the derive forwarding the struct-level `bit_order` to
/// those nested fields — confirm against the derive implementation.
#[derive(Debug, DekuRead, DekuWrite, PartialEq)]
#[deku(bit_order = "lsb")]
pub struct FrameControl {
    /// Frame sub-type (4 bits)
    #[deku(bits = 4)]
    pub sub_type: u8,
    /// Protocol version (2 bits)
    #[deku(bits = 2)]
    pub protocol_version: u8,
    /// Frame type (2 bits, see [`FrameType`])
    pub frame_type: FrameType,

    /// The remaining 8 bits of flags (see [`Flags`])
    pub flags: Flags,
}

fn main() {
let data = vec![0x88u8, 0x41];
let control_frame = FrameControl::try_from(data.as_ref()).unwrap();
assert_eq!(
control_frame,
FrameControl {
protocol_version: 0,
frame_type: FrameType::Data,
sub_type: 8,

flags: Flags {
to_ds: 1,
from_ds: 0,
more_fragments: 0,
retry: 0,
power_management: 0,
more_data: 0,
protected_frame: 1,
order: 0,
}
}
);
}
6 changes: 6 additions & 0 deletions src/ctx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@
use core::marker::PhantomData;
use core::str::FromStr;

/// A bit ordering (order in which the bits of a field are read/written)
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Order {
    /// Most-significant bit first
    Msb0,
    /// Least-significant bit first
    Lsb0,
}

/// An endian
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Endian {
Expand Down
31 changes: 31 additions & 0 deletions src/impls/nonzero.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,42 @@ macro_rules! ImplDekuTraitsCtx {
};
}

/// Implement `DekuReader` for a NonZero type `$typ` under a ctx tuple
/// (`$ctx_type`) that includes a bit `Order`: read the underlying primitive
/// `$readtype` with the same ctx, then enforce the non-zero invariant.
macro_rules! ImplDekuTraitsCtxOrder {
    ($typ:ty, $readtype:ty, $ctx_arg:tt, $ctx_type:tt) => {
        impl DekuReader<'_, $ctx_type> for $typ {
            fn from_reader_with_ctx<R: Read>(
                reader: &mut crate::reader::Reader<R>,
                $ctx_arg: $ctx_type,
            ) -> Result<Self, DekuError> {
                // Delegate the actual bit-level read to the primitive type.
                let raw = <$readtype>::from_reader_with_ctx(reader, $ctx_arg)?;

                // A zero value cannot be represented by a NonZero type.
                match <$typ>::new(raw) {
                    Some(nonzero) => Ok(nonzero),
                    None => Err(DekuError::Parse(format!("NonZero assertion"))),
                }
            }
        }
    };
}

/// Implement the deku traits for a NonZero type `$typ` backed by the
/// primitive `$readtype`, covering every supported ctx combination.
macro_rules! ImplDekuTraits {
    ($typ:ty, $readtype:ty) => {
        // No ctx.
        ImplDekuTraitsCtx!($typ, $readtype, (), ());
        // Endian + size ctx.
        ImplDekuTraitsCtx!($typ, $readtype, (endian, bitsize), (Endian, BitSize));
        ImplDekuTraitsCtx!($typ, $readtype, (endian, bytesize), (Endian, ByteSize));
        // Endian + size + bit-order ctx.
        ImplDekuTraitsCtxOrder!(
            $typ, $readtype,
            (endian, bitsize, order), (Endian, BitSize, Order)
        );
        ImplDekuTraitsCtxOrder!(
            $typ, $readtype,
            (endian, bytesize, order), (Endian, ByteSize, Order)
        );
        // Endian-only ctx.
        ImplDekuTraitsCtx!($typ, $readtype, endian, Endian);
    };
}
Expand Down
Loading
Loading