Skip to content

Commit

Permalink
Add bit_order support
Browse files Browse the repository at this point in the history
  • Loading branch information
wcampbell0x2a committed Dec 2, 2023
1 parent 91bb702 commit 67027f0
Show file tree
Hide file tree
Showing 29 changed files with 1,270 additions and 175 deletions.
88 changes: 49 additions & 39 deletions benches/deku.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,17 @@ struct DekuBits {
data_03: u8,
}

/// Benchmark input: three bit-fields packed into one byte, decoded with
/// LSB-first bit order (`bit_order = "lsb"`), mirroring `DekuBits` above.
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
#[deku(bit_order = "lsb")]
struct DekuBitsLsb {
    // 1 + 2 + 5 bits = exactly one byte of input.
    #[deku(bits = "1")]
    data_01: u8,
    #[deku(bits = "2")]
    data_02: u8,
    #[deku(bits = "5")]
    data_03: u8,
}

#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
struct DekuBytes {
data_00: u8,
Expand All @@ -27,46 +38,22 @@ enum DekuEnum {
VariantA(u8),
}

#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
/// Benchmark input: a length-prefixed byte vector.
#[derive(Debug, PartialEq, DekuRead, DekuWrite, Clone)]
struct DekuVec {
    count: u8,
    /// Number of elements to read is taken from the `count` field above.
    #[deku(count = "count")]
    data: Vec<u8>,
}

/// Benchmark helper: deserialize one `DekuBits` value from `reader`.
fn deku_read_bits(mut reader: impl Read) {
    let mut deku_reader = Reader::new(&mut reader);
    let _parsed = DekuBits::from_reader_with_ctx(&mut deku_reader, ()).unwrap();
}

/// Benchmark helper: serialize one `DekuBits` value to bytes.
fn deku_write_bits(input: &DekuBits) {
    let _serialized = input.to_bytes().unwrap();
}

/// Benchmark helper: deserialize one `DekuBytes` value from `reader`.
fn deku_read_byte(mut reader: impl Read) {
    let mut deku_reader = Reader::new(&mut reader);
    let _parsed = DekuBytes::from_reader_with_ctx(&mut deku_reader, ()).unwrap();
}

/// Benchmark helper: serialize one `DekuBytes` value to bytes.
fn deku_write_byte(input: &DekuBytes) {
    let _serialized = input.to_bytes().unwrap();
}

/// Benchmark helper: deserialize one `DekuEnum` value from `reader`.
fn deku_read_enum(mut reader: impl Read) {
    let mut deku_reader = Reader::new(&mut reader);
    let _parsed = DekuEnum::from_reader_with_ctx(&mut deku_reader, ()).unwrap();
}

/// Benchmark helper: serialize one `DekuEnum` value to bytes.
fn deku_write_enum(input: &DekuEnum) {
    let _serialized = input.to_bytes().unwrap();
}

fn deku_read_vec(mut reader: impl Read) {
fn deku_read<T>(mut reader: impl Read)
where
T: for<'a> DekuReader<'a>,
{
let mut reader = Reader::new(&mut reader);
let _v = DekuVec::from_reader_with_ctx(&mut reader, ()).unwrap();
let _v = <T>::from_reader_with_ctx(&mut reader, ()).unwrap();
}

fn deku_write_vec(input: &DekuVec) {
/// Benchmark helper: serialize any deku container value to bytes.
fn deku_write(input: impl DekuWriter + DekuContainerWrite) {
    let _serialized = input.to_bytes().unwrap();
}

Expand All @@ -75,30 +62,49 @@ fn criterion_benchmark(c: &mut Criterion) {
let reader = Cursor::new(&[0x01; 1 + 2 + 4]);
b.iter_batched(
|| reader.clone(),
|mut reader| deku_read_byte(&mut reader),
|mut reader| deku_read::<DekuBytes>(&mut reader),
BatchSize::SmallInput,
)
});
c.bench_function("deku_write_byte", |b| {
b.iter(|| {
deku_write_byte(black_box(&DekuBytes {
deku_write(black_box(DekuBytes {
data_00: 0x00,
data_01: 0x02,
data_02: 0x03,
}))
})
});

c.bench_function("deku_read_bits", |b| {
let reader = Cursor::new(&[0x01; 1]);
b.iter_batched(
|| reader.clone(),
|mut reader| deku_read_bits(&mut reader),
|mut reader| deku_read::<DekuBits>(&mut reader),
BatchSize::SmallInput,
)
});
c.bench_function("deku_write_bits", |b| {
b.iter(|| {
deku_write_bits(black_box(&DekuBits {
deku_write(black_box(DekuBits {
data_01: 0x0f,
data_02: 0x00,
data_03: 0x01,
}))
})
});

c.bench_function("deku_read_bits_lsb", |b| {
let reader = Cursor::new(&[0x01; 1]);
b.iter_batched(
|| reader.clone(),
|mut reader| deku_read::<DekuBitsLsb>(&mut reader),
BatchSize::SmallInput,
)
});
c.bench_function("deku_write_bits_lsb", |b| {
b.iter(|| {
deku_write(black_box(DekuBitsLsb {
data_01: 0x0f,
data_02: 0x00,
data_03: 0x01,
Expand All @@ -110,12 +116,12 @@ fn criterion_benchmark(c: &mut Criterion) {
let reader = Cursor::new(&[0x01; 2]);
b.iter_batched(
|| reader.clone(),
|mut reader| deku_read_enum(&mut reader),
|mut reader| deku_read::<DekuEnum>(&mut reader),
BatchSize::SmallInput,
)
});
c.bench_function("deku_write_enum", |b| {
b.iter(|| deku_write_enum(black_box(&DekuEnum::VariantA(0x02))))
b.iter(|| deku_write(black_box(DekuEnum::VariantA(0x02))))
});

let deku_write_vec_input = DekuVec {
Expand All @@ -126,12 +132,16 @@ fn criterion_benchmark(c: &mut Criterion) {
let reader = Cursor::new(&[0x08; 8 + 1]);
b.iter_batched(
|| reader.clone(),
|mut reader| deku_read_vec(&mut reader),
|mut reader| deku_read::<DekuVec>(&mut reader),
BatchSize::SmallInput,
)
});
c.bench_function("deku_write_vec", |b| {
b.iter(|| deku_write_vec(black_box(&deku_write_vec_input)))
b.iter_batched(
|| deku_write_vec_input.clone(),
|deku_write_vec_input| deku_write(black_box(deku_write_vec_input)),
BatchSize::SmallInput,
)
});
}

Expand Down
18 changes: 18 additions & 0 deletions deku-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,9 @@ struct DekuData {

/// enum only: byte size of the enum `id`
bytes: Option<Num>,

/// Bit Order for all fields
bit_order: Option<syn::LitStr>,
}

impl DekuData {
Expand Down Expand Up @@ -184,13 +187,15 @@ impl DekuData {
id_type: receiver.id_type?,
bits: receiver.bits,
bytes: receiver.bytes,
bit_order: receiver.bit_order,
};

DekuData::validate(&data)?;

Ok(data)
}

// TODO: Add validation: using #[bit_order] should require #[bytes]
fn validate(data: &DekuData) -> Result<(), TokenStream> {
// Validate `ctx_default`
if data.ctx_default.is_some() && data.ctx.is_none() {
Expand Down Expand Up @@ -315,6 +320,7 @@ impl<'a> TryFrom<&'a DekuData> for DekuDataEnum<'a> {
deku_data.endian.as_ref(),
deku_data.bits.as_ref(),
deku_data.bytes.as_ref(),
deku_data.bit_order.as_ref(),
)?;

Ok(Self {
Expand Down Expand Up @@ -434,6 +440,9 @@ struct FieldData {

// assert value of field
assert_eq: Option<TokenStream>,

/// Bit Order of field
bit_order: Option<syn::LitStr>,
}

impl FieldData {
Expand Down Expand Up @@ -470,6 +479,7 @@ impl FieldData {
cond: receiver.cond?,
assert: receiver.assert?,
assert_eq: receiver.assert_eq?,
bit_order: receiver.bit_order,
};

FieldData::validate(&data)?;
Expand Down Expand Up @@ -649,6 +659,10 @@ struct DekuReceiver {
/// enum only: byte size of the enum `id`
#[darling(default)]
bytes: Option<Num>,

/// Bit order for the container (applies to all fields)
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

type ReplacementError = TokenStream;
Expand Down Expand Up @@ -825,6 +839,10 @@ struct DekuFieldReceiver {
// assert value of field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
assert_eq: Result<Option<TokenStream>, ReplacementError>,

/// Bit Order of field
#[darling(default)]
bit_order: Option<syn::LitStr>,
}

/// Receiver for the variant-level attributes inside a enum
Expand Down
8 changes: 6 additions & 2 deletions deku-derive/src/macros/deku_read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -517,9 +517,11 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream {
if (__deku_pad % 8) == 0 {
let bytes_read = __deku_pad / 8;
let mut buf = vec![0; bytes_read];
let _ = __deku_reader.read_bytes(bytes_read, &mut buf)?;
// TODO: use skip_bytes, or Seek in the future?
let _ = __deku_reader.read_bytes(bytes_read, &mut buf, ::#crate_::ctx::Order::Msb0)?;
} else {
let _ = __deku_reader.read_bits(__deku_pad)?;
// TODO: use skip_bits, or Seek in the future?
let _ = __deku_reader.read_bits(__deku_pad, ::#crate_::ctx::Order::Msb0)?;
}
}
}
Expand All @@ -536,6 +538,7 @@ fn emit_field_read(
let field_type = &f.ty;

let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

let field_reader = &f.reader;

Expand Down Expand Up @@ -617,6 +620,7 @@ fn emit_field_read(
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

// The __deku_reader limiting options are special, we need to generate `(limit, (other, ..))` for them.
Expand Down
2 changes: 2 additions & 0 deletions deku-derive/src/macros/deku_write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -418,6 +418,7 @@ fn emit_field_write(
) -> Result<TokenStream, syn::Error> {
let crate_ = super::get_crate_name();
let field_endian = f.endian.as_ref().or(input.endian.as_ref());
let field_bit_order = f.bit_order.as_ref().or(input.bit_order.as_ref());

// fields to check usage of bit/byte offset
let field_check_vars = [
Expand Down Expand Up @@ -483,6 +484,7 @@ fn emit_field_write(
f.bits.as_ref(),
f.bytes.as_ref(),
f.ctx.as_ref(),
field_bit_order,
)?;

if f.temp {
Expand Down
46 changes: 38 additions & 8 deletions deku-derive/src/macros/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -238,17 +238,24 @@ pub(crate) fn gen_id_args(
endian: Option<&syn::LitStr>,
bits: Option<&Num>,
bytes: Option<&Num>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let id_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
let id_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

match &id_args[..] {
[arg] => Ok(quote! {#arg}),
Expand All @@ -265,18 +272,27 @@ fn gen_field_args(
bits: Option<&Num>,
bytes: Option<&Num>,
ctx: Option<&Punctuated<syn::Expr, syn::token::Comma>>,
bit_order: Option<&syn::LitStr>,
) -> syn::Result<TokenStream> {
let crate_ = get_crate_name();
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {::#crate_::ctx::BitSize(#n)});
let bytes = bytes.map(|n| quote! {::#crate_::ctx::ByteSize(#n)});
let bit_order = bit_order.map(gen_bit_order_from_str).transpose()?;
let ctx = ctx.map(|c| quote! {#c});

// FIXME: Should be `into_iter` here, see https://github.com/rust-lang/rust/issues/66145.
let field_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref(), ctx.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
// TODO: the order here should be documented
let field_args = [
endian.as_ref(),
bits.as_ref(),
bytes.as_ref(),
bit_order.as_ref(),
ctx.as_ref(),
]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();

// Because `impl DekuRead<'_, (T1, T2)>` but `impl DekuRead<'_, T1>`(not tuple)
match &field_args[..] {
Expand All @@ -299,6 +315,20 @@ fn gen_endian_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
}
}

/// Generate bit_order tokens from string: `lsb` -> `Order::Lsb0`.
///
/// Any value other than the known `"lsb"`/`"msb"` literals is re-parsed as a
/// token stream, so the attribute can name a variable (e.g. one from `ctx`).
fn gen_bit_order_from_str(s: &syn::LitStr) -> syn::Result<TokenStream> {
    let crate_ = get_crate_name();
    let value = s.value();
    if value == "lsb" {
        Ok(quote! {::#crate_::ctx::Order::Lsb0})
    } else if value == "msb" {
        Ok(quote! {::#crate_::ctx::Order::Msb0})
    } else {
        // Treat as a variable/expression, possibly provided via `ctx`.
        let tokens: TokenStream = value.parse()?;
        Ok(quote! {#tokens})
    }
}

/// Wraps a TokenStream with a closure providing access to `ctx` variables when
/// `ctx_default` is provided
fn wrap_default_ctx(
Expand Down
1 change: 0 additions & 1 deletion ensure_no_std/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,3 @@ alloc = []
cortex-m-rt = "0.7.3"
deku = { path = "../", default-features = false, features = ["alloc"] }
embedded-alloc = "0.5.0"

1 change: 0 additions & 1 deletion examples/custom_reader_and_writer.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use std::convert::TryInto;

use deku::bitvec::{BitVec, Msb0};
use deku::ctx::BitSize;
use deku::writer::Writer;
use deku::{prelude::*, DekuWriter};
Expand Down
Loading

0 comments on commit 67027f0

Please sign in to comment.