From 1d232363357f0cfd3ddec2f60d511ed675d5eb45 Mon Sep 17 00:00:00 2001 From: wcampbell Date: Tue, 6 Jun 2023 00:22:12 -0400 Subject: [PATCH] Implement from_reader Stream support MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add from_reader and use Read internally to improve the codegen of deku and the performance of derived parsers. Internally, this is implemented using the new src/reader.rs interface in deku parsers. This has a waterfall effect on DekuReader implementations and the deku-derive/deku_read Derived impl. Previous usage with from_bytes and other APIs is unchanged. There is somewhat of a performance hit for bit-only parses, but I find the major improvements in the bytes-wise parsing to be great enough to warrant this change. The following are some sample benchmarks: > critcmp before after group after before ----- ----- ------ deku_read_bits 1.24 845.8±14.29ns ? ?/sec 1.00 679.5±11.83ns ? ?/sec deku_read_byte 1.00 17.8±0.25ns ? ?/sec 2.12 37.8±3.35ns ? ?/sec deku_read_enum 1.00 15.3±0.15ns ? ?/sec 2.04 31.2±0.81ns ? ?/sec deku_read_vec 1.00 676.8±7.04ns ? ?/sec 2.16 1459.5±40.22ns ? ?/sec deku_write_bits 1.00 125.3±3.10ns ? ?/sec 1.04 130.2±11.12ns ? ?/sec deku_write_byte 1.00 161.6±4.86ns ? ?/sec 1.02 165.0±5.91ns ? ?/sec deku_write_enum 1.00 105.6±1.06ns ? ?/sec 1.03 109.0±7.20ns ? ?/sec deku_write_vec 1.00 4.6±0.04µs ? ?/sec 1.06 4.9±0.07µs ? ?/sec The above change removes DekuRead, and replaces it with DekuReader. This contains the from_reader_with_ctx. DekuContainerRead contains from_reader. The crate no_std_io was picked to supply a Read impl for the no_std feature. These are re-exported. 
Add "`Read` enabled" docs to lib.rs Add tests/test_tuple.rs tests Update CHANGELOG.md to reflect changes and help migration to this usage Use llvm-cov in ci for the generation of more accurate coverage reports Update benchmarks to test more complex parser speeds Disable Miri CI Update ensure_no_std to work with new Read usage. Remove wee-alloc in favour of an updated crate for the allocator. Add inline to small functions --- .github/workflows/coverage.yml | 22 + .github/workflows/main.yml | 69 +- CHANGELOG.md | 129 ++++ Cargo.toml | 7 +- benches/deku.rs | 111 +-- deku-derive/src/lib.rs | 20 +- deku-derive/src/macros/deku_read.rs | 270 +++++--- deku-derive/src/macros/deku_write.rs | 10 +- deku-derive/src/macros/mod.rs | 3 +- ensure_no_std/Cargo.toml | 4 +- ensure_no_std/src/bin/main.rs | 67 +- ensure_wasm/src/lib.rs | 3 +- ensure_wasm/tests/deku.rs | 2 +- examples/custom_reader_and_writer.rs | 24 +- examples/deku_input.rs | 43 ++ examples/enums.rs | 11 +- examples/enums_catch_all.rs | 4 +- examples/example.rs | 37 +- examples/ipv4.rs | 15 +- examples/many.rs | 32 + src/attributes.rs | 169 +++-- src/ctx.rs | 23 +- src/error.rs | 3 +- src/impls/bool.rs | 46 +- src/impls/boxed.rs | 98 +-- src/impls/cow.rs | 43 +- src/impls/cstring.rs | 61 +- src/impls/hashmap.rs | 246 ++++--- src/impls/hashset.rs | 227 +++--- src/impls/ipaddr.rs | 94 ++- src/impls/nonzero.rs | 38 +- src/impls/option.rs | 49 +- src/impls/primitive.rs | 648 +++++++++++------- src/impls/slice.rs | 319 ++------- src/impls/tuple.rs | 41 +- src/impls/unit.rs | 30 +- src/impls/vec.rs | 183 ++--- src/lib.rs | 138 +++- src/prelude.rs | 5 +- src/reader.rs | 275 ++++++++ tests/test_alloc.rs | 14 +- tests/test_attributes/test_assert.rs | 3 +- tests/test_attributes/test_assert_eq.rs | 3 +- tests/test_attributes/test_cond.rs | 7 +- tests/test_attributes/test_ctx.rs | 50 +- .../test_limits/test_bits_read.rs | 41 +- .../test_limits/test_bytes_read.rs | 43 +- .../test_attributes/test_limits/test_count.rs | 53 +- 
.../test_attributes/test_limits/test_until.rs | 51 +- tests/test_attributes/test_map.rs | 5 +- tests/test_attributes/test_padding/mod.rs | 19 +- .../test_padding/test_pad_bits_after.rs | 9 +- .../test_padding/test_pad_bits_before.rs | 9 +- .../test_padding/test_pad_bytes_after.rs | 25 +- .../test_padding/test_pad_bytes_before.rs | 25 +- tests/test_attributes/test_skip.rs | 11 +- tests/test_attributes/test_temp.rs | 19 +- tests/test_attributes/test_update.rs | 15 +- tests/test_catch_all.rs | 24 +- .../test_compile/cases/internal_variables.rs | 42 +- .../cases/internal_variables.stderr | 4 +- .../test_compile/cases/unknown_endian.stderr | 23 +- tests/test_enum.rs | 65 +- tests/test_from_reader.rs | 59 ++ tests/test_generic.rs | 33 +- tests/test_magic.rs | 10 +- tests/test_regression.rs | 144 +++- tests/test_struct.rs | 75 +- tests/test_tuple.rs | 24 + 69 files changed, 2732 insertions(+), 1792 deletions(-) create mode 100644 .github/workflows/coverage.yml create mode 100644 examples/deku_input.rs create mode 100644 examples/many.rs create mode 100644 src/reader.rs create mode 100644 tests/test_from_reader.rs create mode 100644 tests/test_tuple.rs diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 00000000..e2564bff --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,22 @@ +name: Coverage + +on: [pull_request, push] + +jobs: + coverage: + runs-on: ubuntu-latest + env: + CARGO_TERM_COLOR: always + steps: + - uses: actions/checkout@v3 + - name: Install Rust + run: rustup update stable + - name: Install cargo-llvm-cov + uses: taiki-e/install-action@cargo-llvm-cov + - name: Generate code coverage + run: cargo llvm-cov --workspace --codecov --output-path codecov.json + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + files: codecov.json + fail_ci_if_error: true diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bf2e8057..3b3c36a2 100644 --- 
a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -33,30 +33,31 @@ jobs: command: test args: --all - test_miri: - name: Miri Test - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly - override: true - components: miri - - run: cargo miri test - - test_miri_big_endian: - name: Miri Test Big Endian - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly - override: true - components: miri - target: mips64-unknown-linux-gnuabi64 - - run: cargo miri test --target mips64-unknown-linux-gnuabi64 +# TODO: Enable Miri +# test_miri: +# name: Miri Test +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - uses: actions-rs/toolchain@v1 +# with: +# toolchain: nightly +# override: true +# components: miri +# - run: cargo miri test +# +# test_miri_big_endian: +# name: Miri Test Big Endian +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - uses: actions-rs/toolchain@v1 +# with: +# toolchain: nightly +# override: true +# components: miri +# target: armebv7r-none-eabi +# - run: cargo miri test --target armebv7r-none-eabi examples: name: Examples @@ -111,7 +112,8 @@ jobs: with: toolchain: nightly override: true - - run: cd ensure_no_std && cargo run --release + target: thumbv7em-none-eabihf + - run: cd ensure_no_std && cargo build --release --target thumbv7em-none-eabihf ensure_wasm: name: Ensure wasm @@ -126,20 +128,3 @@ jobs: with: version: 'latest' - run: cd ensure_wasm && wasm-pack build --target web && wasm-pack test --node - - coverage: - name: Coverage - runs-on: ubuntu-latest - container: - image: xd009642/tarpaulin:develop - options: --security-opt seccomp=unconfined - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Generate code coverage - run: | - cargo tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml - - - name: Upload to 
codecov.io - uses: codecov/codecov-action@v1 diff --git a/CHANGELOG.md b/CHANGELOG.md index b2e7a08c..18bab2fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,135 @@ ## [Unreleased] +## Changes +[#352](https://github.com/sharksforarms/deku/pull/352) added a new function `from_reader` that uses `io::Read`. +`io::Read` is also now used internally, bringing massive performance and usability improvements. + +### New `from_reader` +```rust +use std::io::{Seek, SeekFrom, Read}; +use std::fs::File; +use deku::prelude::*; + +#[derive(Debug, DekuRead, DekuWrite, PartialEq, Eq, Clone, Hash)] +#[deku(endian = "big")] +struct EcHdr { + magic: [u8; 4], + version: u8, + padding1: [u8; 3], +} + +let mut file = File::options().read(true).open("file").unwrap(); +let ec = EcHdr::from_reader((&mut file, 0)).unwrap(); +``` + +- The more internal (with context) `read(..)` was replaced with `from_reader_with_ctx(..)`. +With the switch to internal streaming, the variables `deku::input`, `deku::input_bits`, and `deku::rest` are now not possible and were removed. +`deku::reader` is a replacement for some of the functionality. +See [examples/deku_input.rs](examples/deku_input.rs) for a new example of caching all reads. 
+ +old: +```rust +#[derive(Debug, PartialEq, DekuRead, DekuWrite)] +struct DekuTest { + field_a: u8, + + #[deku( + reader = "bit_flipper_read(*field_a, deku::rest, BitSize(8))", + )] + field_b: u8, +} + +fn custom_read( + field_a: u8, + rest: &BitSlice, + bit_size: BitSize, +) -> Result<(&BitSlice, u8), DekuError> { + + // read field_b, calling original func + let (rest, value) = u8::read(rest, bit_size)?; + + Ok((rest, value)) +} +``` + +new: +```rust +#[derive(Debug, PartialEq, DekuRead, DekuWrite)] +struct DekuTest { + field_a: u8, + + #[deku( + reader = "bit_flipper_read(*field_a, deku::reader, BitSize(8))", + )] + field_b: u8, +} + +fn custom_read( + field_a: u8, + reader: &mut Reader, + bit_size: BitSize, +) -> Result { + + // read field_b, calling original func + let value = u8::from_reader_with_ctx(reader, bit_size)?; + + Ok(value) +} +``` + +- With the addition of using `Read`, containing a byte slice with a reference is not supported: + +old +```rust +#[derive(PartialEq, Debug, DekuRead, DekuWrite)] +struct TestStruct<'a> { + bytes: u8, + + #[deku(bytes_read = "bytes")] + data: &'a [u8], +} +``` + +new +```rust +#[derive(PartialEq, Debug, DekuRead, DekuWrite)] +struct TestStruct { + bytes: u8, + + #[deku(bytes_read = "bytes")] + data: Vec, +} +``` + +- `id_pat` is now required to be the same type as stored id. +This also disallows using tuples for storing the id: + +old: +```rust +#[derive(PartialEq, Debug, DekuRead, DekuWrite)] +#[deku(type = "u8")] +enum DekuTest { + #[deku(id_pat = "_")] + VariantC((u8, u8)), +} +``` + +new: +```rust +#[derive(PartialEq, Debug, DekuRead, DekuWrite)] +#[deku(type = "u8")] +enum DekuTest { + #[deku(id_pat = "_")] + VariantC { + id: u8, + other: u8, + }, +} +``` + +- The feature `const_generics` was removed and is enabled by default. 
+ ## [0.16.0] - 2023-02-28 ### Changes diff --git a/Cargo.toml b/Cargo.toml index 83b400af..00289dbe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ keywords = ["deku", "bits", "serialization", "deserialization", "struct"] categories = ["encoding", "parsing", "no-std"] description = "bit level serialization/deserialization proc-macro for structs" readme = "README.md" +rust-version = "1.65.0" [lib] bench = false @@ -19,16 +20,16 @@ members = [ ] [features] -default = ["std", "const_generics"] -std = ["deku_derive/std", "bitvec/std", "alloc"] +default = ["std"] +std = ["deku_derive/std", "bitvec/std", "alloc", "no_std_io/std"] alloc = ["bitvec/alloc"] logging = ["deku_derive/logging", "log"] -const_generics = [] [dependencies] deku_derive = { version = "^0.16.0", path = "deku-derive", default-features = false} bitvec = { version = "1.0.1", default-features = false } log = { version = "0.4.17", optional = true } +no_std_io = { version = "0.5.0", default-features = false, features = ["alloc"] } [dev-dependencies] rstest = "0.16.0" diff --git a/benches/deku.rs b/benches/deku.rs index 72106c36..4243ff51 100644 --- a/benches/deku.rs +++ b/benches/deku.rs @@ -1,17 +1,23 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use std::io::{Cursor, Read}; + +use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion}; use deku::prelude::*; #[derive(Debug, PartialEq, DekuRead, DekuWrite)] struct DekuBits { #[deku(bits = "1")] data_01: u8, - #[deku(bits = "7")] + #[deku(bits = "2")] data_02: u8, + #[deku(bits = "5")] + data_03: u8, } #[derive(Debug, PartialEq, DekuRead, DekuWrite)] -struct DekuByte { - data: u8, +struct DekuBytes { + data_00: u8, + data_01: u16, + data_02: u32, } #[derive(Debug, PartialEq, DekuRead, DekuWrite)] @@ -21,16 +27,6 @@ enum DekuEnum { VariantA(u8), } -/// This is faster, because we go right to (endian, bytes) -#[derive(Debug, PartialEq, DekuRead, DekuWrite)] -struct DekuVecPerf { - #[deku(bytes = 
"1")] - count: u8, - #[deku(count = "count")] - #[deku(bytes = "1")] - data: Vec, -} - #[derive(Debug, PartialEq, DekuRead, DekuWrite)] struct DekuVec { count: u8, @@ -38,98 +34,105 @@ struct DekuVec { data: Vec, } -fn deku_read_bits(input: &[u8]) { - let (_rest, _v) = DekuBits::from_bytes((input, 0)).unwrap(); +fn deku_read_bits(mut reader: impl Read) { + let mut reader = Reader::new(&mut reader); + let _v = DekuBits::from_reader_with_ctx(&mut reader, ()).unwrap(); } fn deku_write_bits(input: &DekuBits) { let _v = input.to_bytes().unwrap(); } -fn deku_read_byte(input: &[u8]) { - let (_rest, _v) = DekuByte::from_bytes((input, 0)).unwrap(); +fn deku_read_byte(mut reader: impl Read) { + let mut reader = Reader::new(&mut reader); + let _v = DekuBytes::from_reader_with_ctx(&mut reader, ()).unwrap(); } -fn deku_write_byte(input: &DekuByte) { +fn deku_write_byte(input: &DekuBytes) { let _v = input.to_bytes().unwrap(); } -fn deku_read_enum(input: &[u8]) { - let (_rest, _v) = DekuEnum::from_bytes((input, 0)).unwrap(); +fn deku_read_enum(mut reader: impl Read) { + let mut reader = Reader::new(&mut reader); + let _v = DekuEnum::from_reader_with_ctx(&mut reader, ()).unwrap(); } fn deku_write_enum(input: &DekuEnum) { let _v = input.to_bytes().unwrap(); } -fn deku_read_vec(input: &[u8]) { - let (_rest, _v) = DekuVec::from_bytes((input, 0)).unwrap(); +fn deku_read_vec(mut reader: impl Read) { + let mut reader = Reader::new(&mut reader); + let _v = DekuVec::from_reader_with_ctx(&mut reader, ()).unwrap(); } fn deku_write_vec(input: &DekuVec) { let _v = input.to_bytes().unwrap(); } -fn deku_read_vec_perf(input: &[u8]) { - let (_rest, _v) = DekuVecPerf::from_bytes((input, 0)).unwrap(); -} - -fn deku_write_vec_perf(input: &DekuVecPerf) { - let _v = input.to_bytes().unwrap(); -} - fn criterion_benchmark(c: &mut Criterion) { c.bench_function("deku_read_byte", |b| { - b.iter(|| deku_read_byte(black_box([0x01].as_ref()))) + let reader = Cursor::new(&[0x01; 1 + 2 + 4]); + b.iter_batched( 
+ || reader.clone(), + |mut reader| deku_read_byte(&mut reader), + BatchSize::SmallInput, + ) }); c.bench_function("deku_write_byte", |b| { - b.iter(|| deku_write_byte(black_box(&DekuByte { data: 0x01 }))) + b.iter(|| { + deku_write_byte(black_box(&DekuBytes { + data_00: 0x00, + data_01: 0x02, + data_02: 0x03, + })) + }) }); c.bench_function("deku_read_bits", |b| { - b.iter(|| deku_read_bits(black_box([0xf1].as_ref()))) + let reader = Cursor::new(&[0x01; 1]); + b.iter_batched( + || reader.clone(), + |mut reader| deku_read_bits(&mut reader), + BatchSize::SmallInput, + ) }); c.bench_function("deku_write_bits", |b| { b.iter(|| { deku_write_bits(black_box(&DekuBits { data_01: 0x0f, - data_02: 0x01, + data_02: 0x00, + data_03: 0x01, })) }) }); c.bench_function("deku_read_enum", |b| { - b.iter(|| deku_read_enum(black_box([0x01, 0x02].as_ref()))) + let reader = Cursor::new(&[0x01; 2]); + b.iter_batched( + || reader.clone(), + |mut reader| deku_read_enum(&mut reader), + BatchSize::SmallInput, + ) }); c.bench_function("deku_write_enum", |b| { b.iter(|| deku_write_enum(black_box(&DekuEnum::VariantA(0x02)))) }); - let deku_read_vec_input = { - let mut v = [0xFFu8; 101].to_vec(); - v[0] = 100u8; - v - }; let deku_write_vec_input = DekuVec { count: 100, - data: vec![0xFF; 100], + data: vec![0xff; 100], }; c.bench_function("deku_read_vec", |b| { - b.iter(|| deku_read_vec(black_box(&deku_read_vec_input))) + let reader = Cursor::new(&[0x08; 8 + 1]); + b.iter_batched( + || reader.clone(), + |mut reader| deku_read_vec(&mut reader), + BatchSize::SmallInput, + ) }); c.bench_function("deku_write_vec", |b| { b.iter(|| deku_write_vec(black_box(&deku_write_vec_input))) }); - - let deku_write_vec_input = DekuVecPerf { - count: 100, - data: vec![0xFF; 100], - }; - c.bench_function("deku_read_vec_perf", |b| { - b.iter(|| deku_read_vec_perf(black_box(&deku_read_vec_input))) - }); - c.bench_function("deku_write_vec_perf", |b| { - b.iter(|| deku_write_vec_perf(black_box(&deku_write_vec_input))) 
- }); } criterion_group!(benches, criterion_benchmark); diff --git a/deku-derive/src/lib.rs b/deku-derive/src/lib.rs index f9dbde48..87e28f4d 100644 --- a/deku-derive/src/lib.rs +++ b/deku-derive/src/lib.rs @@ -4,13 +4,18 @@ Procedural macros that implement `DekuRead` and `DekuWrite` traits #![warn(missing_docs)] -use crate::macros::{deku_read::emit_deku_read, deku_write::emit_deku_write}; +use std::borrow::Cow; +use std::convert::TryFrom; + use darling::{ast, FromDeriveInput, FromField, FromMeta, FromVariant, ToTokens}; use proc_macro2::TokenStream; use quote::quote; -use std::borrow::Cow; -use std::convert::TryFrom; -use syn::{punctuated::Punctuated, spanned::Spanned, AttributeArgs}; +use syn::punctuated::Punctuated; +use syn::spanned::Spanned; +use syn::AttributeArgs; + +use crate::macros::deku_read::emit_deku_read; +use crate::macros::deku_write::emit_deku_write; mod macros; @@ -662,10 +667,8 @@ fn apply_replacements(input: &syn::LitStr) -> Result, Repla } let input_str = input_value - .replace("deku::input", "__deku_input") // part of the public API `from_bytes` - .replace("deku::input_bits", "__deku_input_bits") // part of the public API `read` + .replace("deku::reader", "__deku_reader") .replace("deku::output", "__deku_output") // part of the public API `write` - .replace("deku::rest", "__deku_rest") .replace("deku::bit_offset", "__deku_bit_offset") .replace("deku::byte_offset", "__deku_byte_offset"); @@ -1006,10 +1009,11 @@ pub fn deku_derive( #[cfg(test)] mod tests { - use super::*; use rstest::rstest; use syn::parse_str; + use super::*; + #[rstest(input, // Valid struct case::struct_empty(r#"struct Test {}"#), diff --git a/deku-derive/src/macros/deku_read.rs b/deku-derive/src/macros/deku_read.rs index 5410608e..97720071 100644 --- a/deku-derive/src/macros/deku_read.rs +++ b/deku-derive/src/macros/deku_read.rs @@ -1,16 +1,16 @@ +use std::convert::TryFrom; + +use darling::ast::{Data, Fields}; +use darling::ToTokens; +use proc_macro2::TokenStream; +use 
quote::quote; +use syn::spanned::Spanned; + use crate::macros::{ gen_ctx_types_and_arg, gen_field_args, gen_internal_field_ident, gen_internal_field_idents, gen_type_from_ctx_id, pad_bits, token_contains_string, wrap_default_ctx, }; use crate::{DekuData, DekuDataEnum, DekuDataStruct, FieldData, Id}; -use darling::{ - ast::{Data, Fields}, - ToTokens, -}; -use proc_macro2::TokenStream; -use quote::quote; -use std::convert::TryFrom; -use syn::spanned::Spanned; pub(crate) fn emit_deku_read(input: &DekuData) -> Result { match &input.data { @@ -45,7 +45,7 @@ fn emit_struct(input: &DekuData) -> Result { .and_then(|v| v.ident.as_ref()) .is_some(); - let (field_idents, field_reads) = emit_field_reads(input, &fields, &ident)?; + let (field_idents, field_reads) = emit_field_reads(input, &fields, &ident, false)?; // filter out temporary fields let field_idents = field_idents @@ -59,36 +59,44 @@ fn emit_struct(input: &DekuData) -> Result { // Implement `DekuContainerRead` for types that don't need a context if input.ctx.is_none() || (input.ctx.is_some() && input.ctx_default.is_some()) { - let from_bytes_body = wrap_default_ctx( - quote! { - use core::convert::TryFrom; - use ::#crate_::bitvec::BitView; - let __deku_input_bits = __deku_input.0.view_bits::<::#crate_::bitvec::Msb0>(); - - let mut __deku_rest = __deku_input_bits; - __deku_rest = &__deku_rest[__deku_input.1..]; + let from_reader_body = quote! { + use core::convert::TryFrom; + let __deku_reader = &mut deku::reader::Reader::new(__deku_input.0); + if __deku_input.1 != 0 { + __deku_reader.skip_bits(__deku_input.1)?; + } - #magic_read + let __deku_value = Self::from_reader_with_ctx(__deku_reader, ())?; - #(#field_reads)* - let __deku_value = #initialize_struct; + Ok((__deku_reader.bits_read, __deku_value)) + }; - let __deku_pad = 8 * ((__deku_rest.len() + 7) / 8) - __deku_rest.len(); - let __deku_read_idx = __deku_input_bits.len() - (__deku_rest.len() + __deku_pad); + let from_bytes_body = quote! 
{ + use core::convert::TryFrom; + let mut __deku_cursor = #crate_::no_std_io::Cursor::new(__deku_input.0); + let mut __deku_reader = &mut deku::reader::Reader::new(&mut __deku_cursor); + if __deku_input.1 != 0 { + __deku_reader.skip_bits(__deku_input.1)?; + } - Ok(((__deku_input_bits[__deku_read_idx..].domain().region().unwrap().1, __deku_pad), __deku_value)) - }, - &input.ctx, - &input.ctx_default, - ); + let __deku_value = Self::from_reader_with_ctx(__deku_reader, ())?; + let read_whole_byte = (__deku_reader.bits_read % 8) == 0; + let idx = if read_whole_byte { + __deku_reader.bits_read / 8 + } else { + (__deku_reader.bits_read - (__deku_reader.bits_read % 8)) / 8 + }; + Ok(((&__deku_input.0[idx..], __deku_reader.bits_read % 8), __deku_value)) + }; tokens.extend(emit_try_from(&imp, &lifetime, &ident, wher)); - tokens.extend(emit_from_bytes( + tokens.extend(emit_container_read( &imp, &lifetime, &ident, wher, + from_reader_body, from_bytes_body, )); } @@ -97,19 +105,18 @@ fn emit_struct(input: &DekuData) -> Result { let read_body = quote! { use core::convert::TryFrom; - let mut __deku_rest = __deku_input_bits; #magic_read #(#field_reads)* let __deku_value = #initialize_struct; - Ok((__deku_rest, __deku_value)) + Ok(__deku_value) }; tokens.extend(quote! { - impl #imp ::#crate_::DekuRead<#lifetime, #ctx_types> for #ident #wher { - fn read(__deku_input_bits: &#lifetime ::#crate_::bitvec::BitSlice, #ctx_arg) -> core::result::Result<(&#lifetime ::#crate_::bitvec::BitSlice, Self), ::#crate_::DekuError> { + impl #imp ::#crate_::DekuReader<#lifetime, #ctx_types> for #ident #wher { + fn from_reader_with_ctx(__deku_reader: &mut ::#crate_::reader::Reader, #ctx_arg) -> core::result::Result { #read_body } } @@ -119,8 +126,8 @@ fn emit_struct(input: &DekuData) -> Result { let read_body = wrap_default_ctx(read_body, &input.ctx, &input.ctx_default); tokens.extend(quote! 
{ - impl #imp ::#crate_::DekuRead<#lifetime> for #ident #wher { - fn read(__deku_input_bits: &#lifetime ::#crate_::bitvec::BitSlice, _: ()) -> core::result::Result<(&#lifetime ::#crate_::bitvec::BitSlice, Self), ::#crate_::DekuError> { + impl #imp ::#crate_::DekuReader<#lifetime> for #ident #wher { + fn from_reader_with_ctx(__deku_reader: &mut ::#crate_::reader::Reader, _: ()) -> core::result::Result { #read_body } } @@ -172,13 +179,21 @@ fn emit_enum(input: &DekuData) -> Result { .and_then(|v| v.ident.as_ref()) .is_some(); - let (consume_id, variant_id) = if let Some(variant_id) = &variant.id { + let (use_id, variant_id) = if let Some(variant_id) = &variant.id { match variant_id { - Id::TokenStream(v) => (true, quote! {&#v}.into_token_stream()), - Id::LitByteStr(v) => (true, v.into_token_stream()), + Id::TokenStream(v) => (false, quote! {&#v}.into_token_stream()), + Id::LitByteStr(v) => (false, v.into_token_stream()), } } else if let Some(variant_id_pat) = &variant.id_pat { - (false, variant_id_pat.clone()) + // If user has supplied an id, then we have an id_pat that and the id variant doesn't + // need read into an id value + if id.is_none() { + // if set, the first field read will not read from reader and instead + // be __deku_variant_id + (true, variant_id_pat.clone()) + } else { + (false, variant_id_pat.clone()) + } } else if has_discriminant { let ident = &variant.ident; let internal_ident = gen_internal_field_ident("e!(#ident)); @@ -205,7 +220,7 @@ fn emit_enum(input: &DekuData) -> Result { quote! 
{ #variant_reader; } } else { let (field_idents, field_reads) = - emit_field_reads(input, &variant.fields.as_ref(), &ident)?; + emit_field_reads(input, &variant.fields.as_ref(), &ident, use_id)?; // filter out temporary fields let field_idents = field_idents @@ -226,18 +241,8 @@ fn emit_enum(input: &DekuData) -> Result { deku_ids.push(deku_id); } - // if we're consuming an id, set the rest to new_rest before reading the variant - let new_rest = if consume_id { - quote! { - __deku_rest = __deku_new_rest; - } - } else { - quote! {} - }; - quote! { { - #new_rest #(#field_reads)* Self :: #initialize_enum } @@ -289,11 +294,11 @@ fn emit_enum(input: &DekuData) -> Result { let variant_id_read = if id.is_some() { quote! { - let (__deku_new_rest, __deku_variant_id) = (__deku_rest, (#id)); + let __deku_variant_id = (#id); } } else if id_type.is_some() { quote! { - let (__deku_new_rest, __deku_variant_id) = <#id_type>::read(__deku_rest, (#id_args))?; + let __deku_variant_id = <#id_type>::from_reader_with_ctx(__deku_reader, (#id_args))?; } } else { // either `id` or `type` needs to be specified @@ -312,35 +317,44 @@ fn emit_enum(input: &DekuData) -> Result { // Implement `DekuContainerRead` for types that don't need a context if input.ctx.is_none() || (input.ctx.is_some() && input.ctx_default.is_some()) { - let from_bytes_body = wrap_default_ctx( - quote! { - use core::convert::TryFrom; - use ::#crate_::bitvec::BitView; - let __deku_input_bits = __deku_input.0.view_bits::<::#crate_::bitvec::Msb0>(); - - let mut __deku_rest = __deku_input_bits; - __deku_rest = &__deku_rest[__deku_input.1..]; + let from_reader_body = quote! 
{ + use core::convert::TryFrom; + let __deku_reader = &mut deku::reader::Reader::new(__deku_input.0); + if __deku_input.1 != 0 { + __deku_reader.skip_bits(__deku_input.1)?; + } - #magic_read + let __deku_value = Self::from_reader_with_ctx(__deku_reader, ())?; - #variant_read + Ok((__deku_reader.bits_read, __deku_value)) + }; - let __deku_pad = 8 * ((__deku_rest.len() + 7) / 8) - __deku_rest.len(); - let __deku_read_idx = __deku_input_bits.len() - (__deku_rest.len() + __deku_pad); + let from_bytes_body = quote! { + use core::convert::TryFrom; + let mut __deku_cursor = #crate_::no_std_io::Cursor::new(__deku_input.0); + let mut __deku_reader = &mut deku::reader::Reader::new(&mut __deku_cursor); + if __deku_input.1 != 0 { + __deku_reader.skip_bits(__deku_input.1)?; + } - Ok(((__deku_input_bits[__deku_read_idx..].domain().region().unwrap().1, __deku_pad), __deku_value)) - }, - &input.ctx, - &input.ctx_default, - ); + let __deku_value = Self::from_reader_with_ctx(__deku_reader, ())?; + let read_whole_byte = (__deku_reader.bits_read % 8) == 0; + let idx = if read_whole_byte { + __deku_reader.bits_read / 8 + } else { + (__deku_reader.bits_read - (__deku_reader.bits_read % 8)) / 8 + }; + Ok(((&__deku_input.0[idx..], __deku_reader.bits_read % 8), __deku_value)) + }; tokens.extend(emit_try_from(&imp, &lifetime, &ident, wher)); - tokens.extend(emit_from_bytes( + tokens.extend(emit_container_read( &imp, &lifetime, &ident, wher, + from_reader_body, from_bytes_body, )); } @@ -348,19 +362,18 @@ fn emit_enum(input: &DekuData) -> Result { let read_body = quote! { use core::convert::TryFrom; - let mut __deku_rest = __deku_input_bits; #magic_read #variant_read - Ok((__deku_rest, __deku_value)) + Ok(__deku_value) }; tokens.extend(quote! 
{ #[allow(non_snake_case)] - impl #imp ::#crate_::DekuRead<#lifetime, #ctx_types> for #ident #wher { - fn read(__deku_input_bits: &#lifetime ::#crate_::bitvec::BitSlice, #ctx_arg) -> core::result::Result<(&#lifetime ::#crate_::bitvec::BitSlice, Self), ::#crate_::DekuError> { + impl #imp ::#crate_::DekuReader<#lifetime, #ctx_types> for #ident #wher { + fn from_reader_with_ctx(__deku_reader: &mut ::#crate_::reader::Reader, #ctx_arg) -> core::result::Result { #read_body } } @@ -371,8 +384,8 @@ fn emit_enum(input: &DekuData) -> Result { tokens.extend(quote! { #[allow(non_snake_case)] - impl #imp ::#crate_::DekuRead<#lifetime> for #ident #wher { - fn read(__deku_input_bits: &#lifetime ::#crate_::bitvec::BitSlice, _: ()) -> core::result::Result<(&#lifetime ::#crate_::bitvec::BitSlice, Self), ::#crate_::DekuError> { + impl #imp ::#crate_::DekuReader<#lifetime> for #ident #wher { + fn from_reader_with_ctx(__deku_reader: &mut ::#crate_::reader::Reader, _: ()) -> core::result::Result { #read_body } } @@ -383,7 +396,10 @@ fn emit_enum(input: &DekuData) -> Result { Some(quote! {#id_type}) } else if let (Some(ctx), Some(id)) = (input.ctx.as_ref(), input.id.as_ref()) { Some(gen_type_from_ctx_id(ctx, id).ok_or_else(|| { - syn::Error::new(id.span(), "DekuRead: cannot determine `id` type from `ctx`") + syn::Error::new( + id.span(), + "DekuReader: cannot determine `id` type from `ctx`", + ) })?) 
} else { None @@ -414,12 +430,10 @@ fn emit_magic_read(input: &DekuData) -> TokenStream { let __deku_magic = #magic; for __deku_byte in __deku_magic { - let (__deku_new_rest, __deku_read_byte) = u8::read(__deku_rest, ())?; + let __deku_read_byte = u8::from_reader_with_ctx(__deku_reader, ())?; if *__deku_byte != __deku_read_byte { return Err(::#crate_::DekuError::Parse(format!("Missing magic value {:?}", #magic))); } - - __deku_rest = __deku_new_rest; } } } else { @@ -436,12 +450,16 @@ fn emit_field_reads( input: &DekuData, fields: &Fields<&FieldData>, ident: &TokenStream, + use_id: bool, ) -> Result<(Vec, Vec), syn::Error> { let mut field_reads = Vec::with_capacity(fields.len()); let mut field_idents = Vec::with_capacity(fields.len()); + let mut use_id = use_id; + for (i, f) in fields.iter().enumerate() { - let (field_ident, field_read) = emit_field_read(input, i, f, ident)?; + let (field_ident, field_read) = emit_field_read(input, i, f, ident, use_id)?; + use_id = false; field_idents.push(FieldIdent { field_ident, is_temp: f.temp, @@ -461,7 +479,7 @@ fn emit_bit_byte_offsets( .any(|v| token_contains_string(v, "__deku_byte_offset")) { Some(quote! { - let __deku_byte_offset = __deku_bit_offset / 8; + let __deku_byte_offset = __deku_reader.bits_read / 8; }) } else { None @@ -473,7 +491,7 @@ fn emit_bit_byte_offsets( || byte_offset.is_some() { Some(quote! { - let __deku_bit_offset = usize::try_from(unsafe { __deku_rest.as_bitptr().offset_from(__deku_input_bits.as_bitptr()) } )?; + let __deku_bit_offset = __deku_reader.bits_read; }) } else { None @@ -487,6 +505,7 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream { quote! { { use core::convert::TryFrom; + // TODO: I hope this consts in most cases? 
let __deku_pad = usize::try_from(#bit_size).map_err(|e| ::#crate_::DekuError::InvalidParam(format!( "Invalid padding param \"({})\": cannot convert to usize", @@ -494,11 +513,13 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream { )) )?; - if __deku_rest.len() >= __deku_pad { - let (__deku_padded_bits, __deku_new_rest) = __deku_rest.split_at(__deku_pad); - __deku_rest = __deku_new_rest; + + if (__deku_pad % 8) == 0 { + let bytes_read = __deku_pad / 8; + let mut buf = vec![0; bytes_read]; + let _ = __deku_reader.read_bytes(bytes_read, &mut buf)?; } else { - return Err(::#crate_::DekuError::Incomplete(::#crate_::error::NeedSize::new(__deku_pad))); + let _ = __deku_reader.read_bits(__deku_pad)?; } } } @@ -509,6 +530,7 @@ fn emit_field_read( i: usize, f: &FieldData, ident: &TokenStream, + use_id: bool, ) -> Result<(TokenStream, TokenStream), syn::Error> { let crate_ = super::get_crate_name(); let field_type = &f.ty; @@ -581,14 +603,14 @@ fn emit_field_read( let trace_field_log = if cfg!(feature = "logging") { quote! { - log::trace!("Reading: {}::{} from {}", #ident, #field_ident_str, __deku_rest); + log::trace!("Reading: {}::{}", #ident, #field_ident_str); } } else { quote! {} }; let field_read_func = if field_reader.is_some() { - quote! { #field_reader } + quote! { #field_reader? } } else { let read_args = gen_field_args( field_endian, @@ -597,7 +619,7 @@ fn emit_field_read( f.ctx.as_ref(), )?; - // The container limiting options are special, we need to generate `(limit, (other, ..))` for them. + // The __deku_reader limiting options are special, we need to generate `(limit, (other, ..))` for them. // These have a problem where when it isn't a copy type, the field will be moved. // e.g. struct FooBar { // a: Baz // a type implement `Into` but not `Copy`. @@ -608,38 +630,67 @@ fn emit_field_read( let type_as_deku_read = if f.map.is_some() { // with map, field_type cannot be used as the // resulting type is within the function. 
- quote!(::#crate_::DekuRead) + quote!(::#crate_::DekuReader) } else { // use type directly - quote!(<#field_type as ::#crate_::DekuRead<'_, _>>) + quote!(<#field_type as ::#crate_::DekuReader<'_, _>>) }; - if let Some(field_count) = &f.count { + + if use_id { + quote! { + __deku_variant_id + } + } else if let Some(field_count) = &f.count { quote! { { use core::borrow::Borrow; - #type_as_deku_read::read(__deku_rest, (::#crate_::ctx::Limit::new_count(usize::try_from(*((#field_count).borrow()))?), (#read_args))) + #type_as_deku_read::from_reader_with_ctx + ( + __deku_reader, + (::#crate_::ctx::Limit::new_count(usize::try_from(*((#field_count).borrow()))?), (#read_args)) + )? } } } else if let Some(field_bits) = &f.bits_read { quote! { { use core::borrow::Borrow; - #type_as_deku_read::read(__deku_rest, (::#crate_::ctx::Limit::new_bit_size(::#crate_::ctx::BitSize(usize::try_from(*((#field_bits).borrow()))?)), (#read_args))) + #type_as_deku_read::from_reader_with_ctx + ( + __deku_reader, + (::#crate_::ctx::Limit::new_bit_size(::#crate_::ctx::BitSize(usize::try_from(*((#field_bits).borrow()))?)), (#read_args)) + )? } } } else if let Some(field_bytes) = &f.bytes_read { quote! { { use core::borrow::Borrow; - #type_as_deku_read::read(__deku_rest, (::#crate_::ctx::Limit::new_byte_size(::#crate_::ctx::ByteSize(usize::try_from(*((#field_bytes).borrow()))?)), (#read_args))) + #type_as_deku_read::from_reader_with_ctx + ( + __deku_reader, + (::#crate_::ctx::Limit::new_byte_size(::#crate_::ctx::ByteSize(usize::try_from(*((#field_bytes).borrow()))?)), (#read_args)) + )? } } } else if let Some(field_until) = &f.until { // We wrap the input into another closure here to enforce that it is actually a callable // Otherwise, an incorrectly passed-in integer could unexpectedly convert into a `Count` limit - quote! {#type_as_deku_read::read(__deku_rest, (::#crate_::ctx::Limit::new_until(#field_until), (#read_args)))} + quote! 
{ + #type_as_deku_read::from_reader_with_ctx + ( + __deku_reader, + (::#crate_::ctx::Limit::new_until(#field_until), (#read_args)) + )? + } } else { - quote! {#type_as_deku_read::read(__deku_rest, (#read_args))} + quote! { + #type_as_deku_read::from_reader_with_ctx + ( + __deku_reader, + (#read_args) + )? + } } }; @@ -655,11 +706,8 @@ fn emit_field_read( ); let field_read_normal = quote! { - let (__deku_new_rest, __deku_value) = #field_read_func?; + let __deku_value = #field_read_func; let __deku_value: #field_type = #field_map(__deku_value)?; - - __deku_rest = __deku_new_rest; - __deku_value }; @@ -720,20 +768,26 @@ fn emit_field_read( Ok((field_ident, field_read)) } -/// emit `from_bytes()` for struct/enum -pub fn emit_from_bytes( +/// emit `from_reader()` and `from_bytes()` for struct/enum +pub fn emit_container_read( imp: &syn::ImplGenerics, lifetime: &TokenStream, ident: &TokenStream, wher: Option<&syn::WhereClause>, - body: TokenStream, + from_reader_body: TokenStream, + from_bytes_body: TokenStream, ) -> TokenStream { let crate_ = super::get_crate_name(); quote! 
{ impl #imp ::#crate_::DekuContainerRead<#lifetime> for #ident #wher { + #[allow(non_snake_case)] + fn from_reader<'a, R: ::#crate_::no_std_io::Read>(__deku_input: (&'a mut R, usize)) -> core::result::Result<(usize, Self), ::#crate_::DekuError> { + #from_reader_body + } + #[allow(non_snake_case)] fn from_bytes(__deku_input: (&#lifetime [u8], usize)) -> core::result::Result<((&#lifetime [u8], usize), Self), ::#crate_::DekuError> { - #body + #from_bytes_body } } } @@ -752,8 +806,10 @@ pub fn emit_try_from( type Error = ::#crate_::DekuError; fn try_from(input: &#lifetime [u8]) -> core::result::Result { - let (rest, res) = ::from_bytes((input, 0))?; - if !rest.0.is_empty() { + let total_len = input.len(); + let mut cursor = ::#crate_::no_std_io::Cursor::new(input); + let (amt_read, res) = ::from_reader((&mut cursor, 0))?; + if (amt_read / 8) != total_len { return Err(::#crate_::DekuError::Parse(format!("Too much data"))); } Ok(res) diff --git a/deku-derive/src/macros/deku_write.rs b/deku-derive/src/macros/deku_write.rs index 331aea5d..87ef27f0 100644 --- a/deku-derive/src/macros/deku_write.rs +++ b/deku-derive/src/macros/deku_write.rs @@ -1,12 +1,14 @@ +use std::convert::TryFrom; + +use darling::ast::{Data, Fields}; +use proc_macro2::TokenStream; +use quote::quote; + use crate::macros::{ gen_ctx_types_and_arg, gen_field_args, gen_struct_destruction, pad_bits, token_contains_string, wrap_default_ctx, }; use crate::{DekuData, DekuDataEnum, DekuDataStruct, FieldData, Id}; -use darling::ast::{Data, Fields}; -use proc_macro2::TokenStream; -use quote::quote; -use std::convert::TryFrom; pub(crate) fn emit_deku_write(input: &DekuData) -> Result { match &input.data { diff --git a/deku-derive/src/macros/mod.rs b/deku-derive/src/macros/mod.rs index a039cab1..3dccf463 100644 --- a/deku-derive/src/macros/mod.rs +++ b/deku-derive/src/macros/mod.rs @@ -1,4 +1,3 @@ -use crate::Num; use proc_macro2::{Ident, Span, TokenStream}; use quote::{quote, ToTokens}; use syn::parse::Parser; @@ 
-6,6 +5,8 @@ use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::token::Comma; +use crate::Num; + pub(crate) mod deku_read; pub(crate) mod deku_write; diff --git a/ensure_no_std/Cargo.toml b/ensure_no_std/Cargo.toml index a8da0522..f3799d4a 100644 --- a/ensure_no_std/Cargo.toml +++ b/ensure_no_std/Cargo.toml @@ -19,5 +19,7 @@ default = ["alloc"] alloc = [] [dependencies] -wee_alloc = "0.4" +cortex-m-rt = "0.7.3" deku = { path = "../", default-features = false, features = ["alloc"] } +embedded-alloc = "0.5.0" + diff --git a/ensure_no_std/src/bin/main.rs b/ensure_no_std/src/bin/main.rs index f065f8c0..8bf3102a 100644 --- a/ensure_no_std/src/bin/main.rs +++ b/ensure_no_std/src/bin/main.rs @@ -1,41 +1,16 @@ -//! Based on https://github.com/rustwasm/wee_alloc/tree/master/example -//! Run with `cargo +nightly run --release` - +//! cargo build --target thumbv7em-none-eabihf #![no_std] #![no_main] -#![feature(core_intrinsics, lang_items, alloc_error_handler)] extern crate alloc; -extern crate wee_alloc; - -#[no_mangle] -#[allow(non_snake_case)] -fn _Unwind_Resume() {} -#[global_allocator] -static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; +use core::panic::PanicInfo; -// Need to provide a tiny `panic` implementation for `#![no_std]`. -// This translates into an `unreachable` instruction that will -// raise a `trap` the WebAssembly execution if we panic at runtime. -#[panic_handler] -#[no_mangle] -unsafe fn panic(_info: &::core::panic::PanicInfo) -> ! { - ::core::intrinsics::abort(); -} - -// Need to provide an allocation error handler which just aborts -// the execution with trap. -#[alloc_error_handler] -#[no_mangle] -unsafe fn oom(_: ::core::alloc::Layout) -> ! { - ::core::intrinsics::abort(); -} +use cortex_m_rt::entry; +use embedded_alloc::Heap; -// Needed for non-wasm targets. 
-#[lang = "eh_personality"] -#[no_mangle] -extern "C" fn eh_personality() {} +#[global_allocator] +static HEAP: Heap = Heap::empty(); use alloc::{format, vec, vec::Vec}; use deku::prelude::*; @@ -51,12 +26,24 @@ struct DekuTest { data: Vec, } -#[no_mangle] -pub extern "C" fn main() -> () { - let test_data: Vec = vec![0b10101_101, 0x02, 0xBE, 0xEF]; +#[entry] +fn main() -> ! { + // Initialize the allocator BEFORE you use it + { + use core::mem::MaybeUninit; + const HEAP_SIZE: usize = 1024; + static mut HEAP_MEM: [MaybeUninit; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE]; + unsafe { HEAP.init(HEAP_MEM.as_ptr() as usize, HEAP_SIZE) } + } + + // now the allocator is ready types like Box, Vec can be used. + + #[allow(clippy::unusual_byte_groupings)] + let test_data: &[u8] = &[0b10101_101, 0x02, 0xBE, 0xEF]; + let mut cursor = deku::no_std_io::Cursor::new(test_data); // Test reading - let (_rest, val) = DekuTest::from_bytes((&test_data, 0)).unwrap(); + let (_rest, val) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest { field_a: 0b10101, @@ -68,6 +55,12 @@ pub extern "C" fn main() -> () { ); // Test writing - let val = val.to_bytes().unwrap(); - assert_eq!(test_data, val); + let _val = val.to_bytes().unwrap(); + + loop { /* .. */ } +} + +#[panic_handler] +fn panic(_: &PanicInfo) -> ! 
{ + loop {} } diff --git a/ensure_wasm/src/lib.rs b/ensure_wasm/src/lib.rs index 217b3b0c..539f94ec 100644 --- a/ensure_wasm/src/lib.rs +++ b/ensure_wasm/src/lib.rs @@ -34,7 +34,8 @@ pub struct DekuTest { #[wasm_bindgen] pub fn deku_read(input: &[u8]) -> DekuTest { - let (_rest, val) = DekuTest::from_bytes((input, 0)).unwrap(); + let mut cursor = deku::no_std_io::Cursor::new(input); + let (_rest, val) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); val } diff --git a/ensure_wasm/tests/deku.rs b/ensure_wasm/tests/deku.rs index c181dae0..ac21e6ba 100644 --- a/ensure_wasm/tests/deku.rs +++ b/ensure_wasm/tests/deku.rs @@ -15,7 +15,7 @@ fn test_read() { field_b: 0b101, field_c: 0xBE }, - deku_read([0b10101_101, 0xBE].as_ref()) + deku_read(&mut [0b10101_101, 0xBE]) ) } diff --git a/examples/custom_reader_and_writer.rs b/examples/custom_reader_and_writer.rs index 29e35e58..908696c4 100644 --- a/examples/custom_reader_and_writer.rs +++ b/examples/custom_reader_and_writer.rs @@ -1,29 +1,27 @@ -use deku::bitvec::{BitSlice, BitVec, Msb0}; +use std::convert::TryInto; + +use deku::bitvec::{BitVec, Msb0}; use deku::ctx::BitSize; use deku::prelude::*; -use std::convert::TryInto; -fn bit_flipper_read( +fn bit_flipper_read( field_a: u8, - rest: &BitSlice, + reader: &mut Reader, bit_size: BitSize, -) -> Result<(&BitSlice, u8), DekuError> { +) -> Result { // Access to previously read fields println!("field_a = 0x{:X}", field_a); - // The current rest - println!("rest = {:?}", rest); - // Size of the current field println!("bit_size: {:?}", bit_size); // read field_b, calling original func - let (rest, value) = u8::read(rest, bit_size)?; + let value = u8::from_reader_with_ctx(reader, bit_size)?; // flip the bits on value if field_a is 0x01 let value = if field_a == 0x01 { !value } else { value }; - Ok((rest, value)) + Ok(value) } fn bit_flipper_write( @@ -52,16 +50,16 @@ struct DekuTest { field_a: u8, #[deku( - reader = "bit_flipper_read(*field_a, deku::rest, BitSize(8))", + 
reader = "bit_flipper_read(*field_a, deku::reader, BitSize(8))", writer = "bit_flipper_write(*field_a, *field_b, deku::output, BitSize(8))" )] field_b: u8, } fn main() { - let test_data: &[u8] = [0x01, 0b1001_0110].as_ref(); + let test_data = [0x01, 0b1001_0110]; - let (_rest, ret_read) = DekuTest::from_bytes((test_data, 0)).unwrap(); + let (_read_amt, ret_read) = DekuTest::from_reader((&mut test_data.as_slice(), 0)).unwrap(); assert_eq!( ret_read, diff --git a/examples/deku_input.rs b/examples/deku_input.rs new file mode 100644 index 00000000..34974a5a --- /dev/null +++ b/examples/deku_input.rs @@ -0,0 +1,43 @@ +//! Example of a close replacement for deku::input +use deku::prelude::*; +use std::io::{self, Cursor, Read}; + +/// Every read to this struct will be saved into an internal cache. This is to keep the cache +/// around for the crc without reading from the buffer twice +struct ReaderCrc { + reader: R, + pub cache: Vec, +} + +impl ReaderCrc { + pub fn new(reader: R) -> Self { + Self { + reader, + cache: vec![], + } + } +} + +impl Read for ReaderCrc { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let n = self.reader.read(buf); + self.cache.extend_from_slice(buf); + n + } +} + +#[derive(Debug, DekuRead)] +pub struct DekuStruct { + pub a: u8, + pub b: u8, +} + +fn main() { + let data = vec![0x01, 0x02]; + let input = Cursor::new(&data); + let mut reader = ReaderCrc::new(input); + let (_, s) = DekuStruct::from_reader((&mut reader, 0)).unwrap(); + assert_eq!(reader.cache, data); + assert_eq!(s.a, 1); + assert_eq!(s.b, 2); +} diff --git a/examples/enums.rs b/examples/enums.rs index f6a16a90..9286c15b 100644 --- a/examples/enums.rs +++ b/examples/enums.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; +use std::io::Cursor; + +use deku::{prelude::*, reader::Reader}; use hexlit::hex; -use std::convert::TryFrom; #[derive(Debug, PartialEq, DekuRead, DekuWrite)] #[deku(type = "u8")] @@ -21,12 +22,16 @@ enum DekuTest { Var5 { id: u8 }, #[deku(id_pat = "&id if id > 6")] 
Var6 { id: u8 }, + #[deku(id_pat = "_")] + VarDefault { id: u8, value: u8 }, } fn main() { let test_data = hex!("03020102").to_vec(); - let deku_test = DekuTest::try_from(test_data.as_ref()).unwrap(); + let mut cursor = Cursor::new(&test_data); + let mut reader = Reader::new(&mut cursor); + let deku_test = DekuTest::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!( DekuTest::Var4 { diff --git a/examples/enums_catch_all.rs b/examples/enums_catch_all.rs index b967ad86..8126d1e0 100644 --- a/examples/enums_catch_all.rs +++ b/examples/enums_catch_all.rs @@ -1,7 +1,7 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use hexlit::hex; -use std::convert::TryFrom; -use std::convert::TryInto; #[derive(Clone, Copy, PartialEq, Eq, Debug, DekuWrite, DekuRead)] #[deku(type = "u8")] diff --git a/examples/example.rs b/examples/example.rs index 6e957d33..eae2f91c 100644 --- a/examples/example.rs +++ b/examples/example.rs @@ -1,11 +1,18 @@ +//! To test out the "logging" feature: +//! ``` +//! $ RUST_LOG=trace cargo run --example example --features logging +//! 
``` + #![allow(clippy::unusual_byte_groupings)] -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[derive(Debug, PartialEq, DekuRead, DekuWrite)] struct FieldF { #[deku(bits = "6")] + #[deku(assert_eq = "6")] data: u8, } @@ -15,7 +22,6 @@ struct FieldF { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | field_a | field_b |c| field_d | e | f | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// #[derive(Debug, PartialEq, DekuRead, DekuWrite)] // #[deku(endian = "little")] // By default it uses the system endianness, but can be overwritten struct DekuTest { @@ -35,32 +41,33 @@ struct DekuTest { } fn main() { - let test_data: &[u8] = [ - 0xAB, + env_logger::init(); + let test_data: &[u8] = &[ + 0xab, 0b1010010_1, - 0xAB, - 0xCD, + 0xab, + 0xcd, 0b1100_0110, 0x02, - 0xBE, - 0xEF, - 0xC0, - 0xFE, - ] - .as_ref(); + 0xbe, + 0xef, + 0xc0, + 0xfe, + ]; let test_deku = DekuTest::try_from(test_data).unwrap(); + println!("{test_deku:02x?}"); assert_eq!( DekuTest { - field_a: 0xAB, + field_a: 0xab, field_b: 0b0_1010010, field_c: 0b0000000_1, - field_d: 0xABCD, + field_d: 0xabcd, field_e: 0b0000_0011, field_f: FieldF { data: 0b00_000110 }, num_items: 2, - items: vec![0xBEEF, 0xC0FE], + items: vec![0xbeef, 0xc0fe], }, test_deku ); diff --git a/examples/ipv4.rs b/examples/ipv4.rs index 77052834..a14e1c35 100644 --- a/examples/ipv4.rs +++ b/examples/ipv4.rs @@ -1,7 +1,8 @@ +use std::convert::TryInto; +use std::net::Ipv4Addr; + use deku::prelude::*; use hexlit::hex; -use std::convert::{TryFrom, TryInto}; -use std::net::Ipv4Addr; /// Ipv4 Header /// ```text @@ -42,15 +43,17 @@ pub struct Ipv4Header { pub protocol: u8, // Protocol pub checksum: u16, // Header checksum pub src: Ipv4Addr, // Source IP Address - pub dst: Ipv4Addr, // Destination IP Address - // options - // padding + pub dst: Ipv4Addr, /* Destination IP Address + * options + * padding */ } fn main() { 
let test_data = hex!("4500004b0f490000801163a591fea0ed91fd02cb").to_vec(); - let ip_header = Ipv4Header::try_from(test_data.as_ref()).unwrap(); + let mut cursor = std::io::Cursor::new(test_data.clone()); + let mut reader = deku::reader::Reader::new(&mut cursor); + let ip_header = Ipv4Header::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!( Ipv4Header { diff --git a/examples/many.rs b/examples/many.rs new file mode 100644 index 00000000..d0b62e06 --- /dev/null +++ b/examples/many.rs @@ -0,0 +1,32 @@ +use deku::{ctx::Limit, prelude::*, DekuRead, DekuWrite}; +use std::io::Cursor; + +#[derive(Debug, DekuRead, DekuWrite)] +struct Test { + pub a: u64, + pub b: u64, + pub c: u64, +} + +fn main() { + let input: Vec<_> = (0..10_0000) + .map(|i| Test { + a: i, + b: i + 1, + c: i + 2, + }) + .collect(); + let custom: Vec = input + .iter() + .flat_map(|x| x.to_bytes().unwrap().into_iter()) + .collect(); + + let mut binding = Cursor::new(custom.clone()); + let mut reader = Reader::new(&mut binding); + let ret = as DekuReader>>::from_reader_with_ctx( + &mut reader, + Limit::new_count(10_0000), + ); + + println!("{:?}", ret); +} diff --git a/src/attributes.rs b/src/attributes.rs index 45b70fc7..16b0028f 100644 --- a/src/attributes.rs +++ b/src/attributes.rs @@ -75,6 +75,7 @@ Example: ```rust # use deku::prelude::*; # use std::convert::{TryInto, TryFrom}; +# use std::io::Cursor; # #[derive(Debug, PartialEq, DekuRead, DekuWrite)] // #[deku(endian = "little")] // top-level, defaults to system endianness struct DekuTest { @@ -83,9 +84,10 @@ struct DekuTest { field_default: u16, // defaults to top-level } -let data: Vec = vec![0xAB, 0xCD, 0xAB, 0xCD]; +let data: &[u8] = &[0xAB, 0xCD, 0xAB, 0xCD]; +let mut cursor = Cursor::new(data); -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -96,7 +98,7 @@ assert_eq!( ); let value: Vec = value.try_into().unwrap(); -assert_eq!(data, value); 
+assert_eq!(data, &*value); ``` **Note**: The `endian` is passed as a context argument to sub-types @@ -123,9 +125,9 @@ struct DekuTest { field_child: Child, } -let data: Vec = vec![0xAB, 0xCD, 0xAB, 0xCD, 0xEF, 0xBE]; +let data: &[u8] = &[0xAB, 0xCD, 0xAB, 0xCD, 0xEF, 0xBE]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -137,7 +139,7 @@ assert_eq!( ); let value: Vec = value.try_into().unwrap(); -assert_eq!(data, value); +assert_eq!(&*data, value); ``` # magic @@ -156,9 +158,9 @@ struct DekuTest { data: u8 } -let data: Vec = vec![b'd', b'e', b'k', b'u', 50]; +let data: &[u8] = &[b'd', b'e', b'k', b'u', 50]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { data: 50 }, @@ -183,9 +185,9 @@ struct DekuTest { data: u8 } -let data: Vec = vec![0x00, 0x01, 0x02]; +let data: &[u8] = &[0x00, 0x01, 0x02]; -let value = DekuTest::try_from(data.as_ref()); +let value = DekuTest::try_from(data); assert_eq!( Err(DekuError::Assertion("DekuTest.data field failed assertion: * data >= 8".into())), @@ -207,9 +209,9 @@ struct DekuTest { data: u8, } -let data: Vec = vec![0x01]; +let data: &[u8] = &[0x01]; -let mut value = DekuTest::try_from(data.as_ref()).unwrap(); +let mut value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { data: 0x01 }, @@ -245,9 +247,9 @@ struct DekuTest { field_c: u8, // defaults to size_of*8 } -let data: Vec = vec![0b11_101010, 0xFF]; +let data: &[u8] = &[0b11_101010, 0xFF]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -259,7 +261,7 @@ assert_eq!( ); let value: Vec = value.try_into().unwrap(); -assert_eq!(data, value); +assert_eq!(&*data, value); ``` # bytes @@ -279,9 +281,9 @@ struct DekuTest { field_b: u8, // defaults to size_of } -let data: Vec = vec![0xAB, 0xCD, 0xFF]; +let data: &[u8] = &[0xAB, 0xCD, 
0xFF]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -311,9 +313,9 @@ struct DekuTest { items: Vec, } -let data: Vec = vec![0x02, 0xAB, 0xCD]; +let data: &[u8] = &[0x02, 0xAB, 0xCD]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -354,9 +356,9 @@ struct DekuTest { items: Vec, } -let data: Vec = vec![0x04, 0xAB, 0xBC, 0xDE, 0xEF]; +let data: &[u8] = &[0x04, 0xAB, 0xBC, 0xDE, 0xEF]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -369,7 +371,7 @@ assert_eq!( ); let value: Vec = value.try_into().unwrap(); -assert_eq!(data, value); +assert_eq!(&*data, value); ``` **Note**: See [update](#update) for more information on the attribute! @@ -401,8 +403,8 @@ struct DekuTest { string: Vec } -let data: Vec = vec![b'H', b'e', b'l', b'l', b'o', 0]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let data: &[u8] = &[b'H', b'e', b'l', b'l', b'o', 0]; +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -429,10 +431,10 @@ struct DekuTest { items: Vec, } -let data: Vec = vec![0x02, 0xAB, 0xCD]; +let data: &[u8] = &[0x02, 0xAB, 0xCD]; // `mut` so it can be updated -let mut value = DekuTest::try_from(data.as_ref()).unwrap(); +let mut value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { count: 0x02, items: vec![0xAB, 0xCD] }, @@ -478,9 +480,9 @@ struct DekuTest { items: Vec, } -let data: Vec = vec![0x01, 0xBE, 0xEF]; +let data: &[u8] = &[0x01, 0xBE, 0xEF]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -546,9 +548,9 @@ struct DekuTest { field_c: u8, } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = 
DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { field_a: 0x01, field_b: None, field_c: 0x02 }, @@ -572,9 +574,9 @@ pub struct DekuTest { pub field_b: u8, } -let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; +let data: &[u8] = &[0xAA, 0xBB, 0xCC, 0xDD]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -605,9 +607,9 @@ struct DekuTest { field_b: u8, } -let data: Vec = vec![0b10_01_1001]; +let data: &[u8] = &[0b10_01_1001]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -637,9 +639,9 @@ pub struct DekuTest { pub field_b: u8, } -let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; +let data: &[u8] = &[0xAA, 0xBB, 0xCC, 0xDD]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -670,9 +672,9 @@ struct DekuTest { field_b: u8, } -let data: Vec = vec![0b10_01_1001]; +let data: &[u8] = &[0b10_01_1001]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -708,9 +710,9 @@ struct DekuTest { field_d: Option, } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { field_a: 0x01, field_b: Some(0x02), field_c: Some(0x05), field_d: Some(0x06)}, @@ -742,9 +744,9 @@ struct DekuTest { field_c: u8, } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { field_a: 0x01, field_b: Some(0x01), field_c: 0x02 }, @@ -777,9 +779,9 @@ impl DekuTest { } } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = 
DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { field_a: "1".to_string(), field_b: "2".to_string() }, @@ -800,7 +802,7 @@ use deku::prelude::*; # #[derive(PartialEq, Debug, DekuRead, DekuWrite)] struct DekuTest { #[deku( - reader = "DekuTest::read(deku::rest)", + reader = "DekuTest::read(deku::reader)", writer = "DekuTest::write(deku::output, &self.field_a)" )] field_a: String, @@ -808,11 +810,11 @@ struct DekuTest { impl DekuTest { /// Read and convert to String - fn read( - rest: &BitSlice, - ) -> Result<(&BitSlice, String), DekuError> { - let (rest, value) = u8::read(rest, ())?; - Ok((rest, value.to_string())) + fn read( + reader: &mut deku::reader::Reader, + ) -> Result { + let value = u8::from_reader_with_ctx(reader, ())?; + Ok(value.to_string()) } /// Parse from String to u8 and write @@ -822,9 +824,9 @@ impl DekuTest { } } -let data: Vec = vec![0x01]; +let data: &[u8] = &[0x01]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { field_a: "1".to_string() }, @@ -832,7 +834,7 @@ assert_eq!( ); let value: Vec = value.try_into().unwrap(); -assert_eq!(data, value); +assert_eq!(data, &*value); ``` # ctx @@ -852,9 +854,12 @@ for example `#[deku("a, b")]` 2. `endian`, `bytes`, `bits` attributes declared on the top-level - These are prepended to the list of ctx variables +**Note**: The `enum` or `struct` that uses `ctx` will not implement [DekuContainerRead](crate::DekuContainerRead) or [DekuContainerWrite](crate::DekuContainerWrite) unless [ctx_default](#ctx_default) is also used. 
+ Example ```rust # use deku::prelude::*; +# use std::io::Cursor; #[derive(DekuRead, DekuWrite)] #[deku(ctx = "a: u8")] struct Subtype { @@ -869,9 +874,10 @@ struct Test { sub: Subtype } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; +let mut cursor = Cursor::new(data); -let (rest, value) = Test::from_bytes((&data[..], 0)).unwrap(); +let (amt_read, value) = Test::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(value.a, 0x01); assert_eq!(value.sub.b, 0x01 + 0x02) ``` @@ -920,6 +926,7 @@ values for the context Example: ```rust # use deku::prelude::*; +# use std::io::Cursor; #[derive(DekuRead, DekuWrite)] #[deku(ctx = "a: u8", ctx_default = "1")] // Defaults `a` to 1 struct Subtype { @@ -934,18 +941,20 @@ struct Test { sub: Subtype } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; +let mut cursor = Cursor::new(data); // Use with context from `Test` -let (rest, value) = Test::from_bytes((&data[..], 0)).unwrap(); +let (amt_read, value) = Test::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(value.a, 0x01); assert_eq!(value.sub.b, 0x01 + 0x02); // Use as a stand-alone container, using defaults -// Note: `from_bytes` is now available on `SubType` -let data: Vec = vec![0x02]; +// Note: `from_reader` is now available on `SubType` +let data: &[u8] = &[0x02]; +let mut cursor = Cursor::new(data); -let (rest, value) = Subtype::from_bytes((&data[..], 0)).unwrap(); +let (amt_read, value) = Subtype::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(value.b, 0x01 + 0x02) ``` @@ -979,8 +988,8 @@ enum MyEnum { VariantB, } -let data: Vec = vec![0x01_u8, 0xff, 0xab]; -let ret_read = DekuTest::try_from(data.as_ref()).unwrap(); +let data: &[u8] = &[0x01_u8, 0xff, 0xab]; +let ret_read = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest { @@ -992,7 +1001,7 @@ assert_eq!( ); let ret_write: Vec = ret_read.try_into().unwrap(); -assert_eq!(ret_write, data) +assert_eq!(&*ret_write, data) ``` ## id (variant) @@ -1007,6 +1016,7 @@ or [id 
(top-level)](#id-top-level) Example: ```rust # use deku::prelude::*; +# use std::io::Cursor; # use std::convert::{TryInto, TryFrom}; # #[derive(PartialEq, Debug, DekuRead, DekuWrite)] #[deku(type = "u8")] @@ -1017,9 +1027,10 @@ enum DekuTest { VariantB(u8, u16), } -let data: Vec = vec![0x01, 0xFF, 0x02, 0xAB, 0xEF, 0xBE]; +let data: &[u8] = &[0x01, 0xFF, 0x02, 0xAB, 0xEF, 0xBE]; +let mut cursor = Cursor::new(data); -let (rest, value) = DekuTest::from_bytes((data.as_ref(), 0)).unwrap(); +let (amt_read, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantA(0xFF), @@ -1029,7 +1040,7 @@ assert_eq!( let variant_bytes: Vec = value.try_into().unwrap(); assert_eq!(vec![0x01, 0xFF], variant_bytes); -let (rest, value) = DekuTest::from_bytes(rest).unwrap(); +let (amt_read, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantB(0xAB, 0xBEEF), @@ -1043,6 +1054,7 @@ assert_eq!(vec![0x02, 0xAB, 0xEF, 0xBE], variant_bytes); Example discriminant ```rust # use deku::prelude::*; +# use std::io::Cursor; # use std::convert::{TryInto, TryFrom}; # #[derive(PartialEq, Debug, DekuRead, DekuWrite)] #[deku(type = "u8")] @@ -1051,9 +1063,10 @@ enum DekuTest { VariantB, } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; +let mut cursor = Cursor::new(data); -let (rest, value) = DekuTest::from_bytes((data.as_ref(), 0)).unwrap(); +let (amt_read, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantA, @@ -1063,7 +1076,7 @@ assert_eq!( let variant_bytes: Vec = value.try_into().unwrap(); assert_eq!(vec![0x01], variant_bytes); -let (rest, value) = DekuTest::from_bytes(rest).unwrap(); +let (rest, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantB, @@ -1083,6 +1096,7 @@ The enum variant must have space to store the identifier for proper writing. 
Example: ```rust # use deku::prelude::*; +# use std::io::Cursor; # use std::convert::{TryInto, TryFrom}; # #[derive(PartialEq, Debug, DekuRead, DekuWrite)] #[deku(type = "u8")] @@ -1097,9 +1111,10 @@ enum DekuTest { VariantC(u8), } -let data: Vec = vec![0x03, 0xFF]; +let data: &[u8] = &[0x03, 0xFF]; +let mut cursor = Cursor::new(data); -let (rest, value) = DekuTest::from_bytes((data.as_ref(), 0)).unwrap(); +let (amt_read, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantB { id: 0x03 }, @@ -1109,7 +1124,7 @@ assert_eq!( let variant_bytes: Vec = value.try_into().unwrap(); assert_eq!(vec![0x03], variant_bytes); -let (rest, value) = DekuTest::from_bytes(rest).unwrap(); +let (rest, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantC(0xFF), @@ -1133,6 +1148,7 @@ Set the bit size of the enum variant `id` Example: ```rust # use deku::prelude::*; +# use std::io::Cursor; # use std::convert::{TryInto, TryFrom}; # #[derive(PartialEq, Debug, DekuRead, DekuWrite)] #[deku(type = "u8", bits = "4")] @@ -1141,9 +1157,10 @@ enum DekuTest { VariantA( #[deku(bits = "4")] u8, u8), } -let data: Vec = vec![0b1001_0110, 0xFF]; +let data: &[u8] = &[0b1001_0110, 0xFF]; +let mut cursor = Cursor::new(data); -let (rest, value) = DekuTest::from_bytes((&data, 0)).unwrap(); +let (amt_read, value) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!( DekuTest::VariantA(0b0110, 0xFF), @@ -1171,9 +1188,9 @@ enum DekuTest { VariantA(u8), } -let data: Vec = vec![0xEF, 0xBE, 0xFF]; +let data: &[u8] = &[0xEF, 0xBE, 0xFF]; -let value = DekuTest::try_from(data.as_ref()).unwrap(); +let value = DekuTest::try_from(data).unwrap(); assert_eq!( DekuTest::VariantA(0xFF), diff --git a/src/ctx.rs b/src/ctx.rs index db2a1e04..37283288 100644 --- a/src/ctx.rs +++ b/src/ctx.rs @@ -23,6 +23,7 @@ impl Endian { /// [`Endian::default`], but const. 
/// /// [`Endian::default`]: Endian::default() + #[inline] pub const fn new() -> Self { #[cfg(target_endian = "little")] let endian = Endian::Little; @@ -34,11 +35,13 @@ impl Endian { } /// Is it little endian + #[inline] pub fn is_le(self) -> bool { self == Endian::Little } /// Is it big endian + #[inline] pub fn is_be(self) -> bool { self == Endian::Big } @@ -46,6 +49,7 @@ impl Endian { impl Default for Endian { /// Return the endianness of the target's CPU. + #[inline] fn default() -> Self { Self::new() } @@ -58,11 +62,13 @@ impl FromStr for Endian { /// # Examples /// ```rust /// use std::str::FromStr; + /// /// use deku::ctx::Endian; /// assert_eq!(FromStr::from_str("little"), Ok(Endian::Little)); /// assert_eq!(FromStr::from_str("big"), Ok(Endian::Big)); /// assert!(::from_str("not an endian").is_err()); /// ``` + #[inline] fn from_str(s: &str) -> Result { match s { "little" => Ok(Endian::Little), @@ -92,24 +98,28 @@ pub enum Limit bool> { } impl From for Limit bool> { + #[inline] fn from(n: usize) -> Self { Limit::Count(n) } } impl FnMut(&'a T) -> bool> From for Limit { + #[inline] fn from(predicate: Predicate) -> Self { Limit::Until(predicate, PhantomData) } } impl From for Limit bool> { + #[inline] fn from(size: ByteSize) -> Self { Limit::ByteSize(size) } } impl From for Limit bool> { + #[inline] fn from(size: BitSize) -> Self { Limit::BitSize(size) } @@ -119,6 +129,7 @@ impl FnMut(&'a T) -> bool> Limit { /// Constructs a new Limit that reads until the given predicate returns true /// The predicate is given a reference to the latest read value and must return /// true to stop reading + #[inline] pub fn new_until(predicate: Predicate) -> Self { predicate.into() } @@ -126,16 +137,19 @@ impl FnMut(&'a T) -> bool> Limit { impl Limit bool> { /// Constructs a new Limit that reads until the given number of elements are read + #[inline] pub fn new_count(count: usize) -> Self { count.into() } /// Constructs a new Limit that reads until the given size + #[inline] 
pub fn new_bit_size(size: BitSize) -> Self { size.into() } /// Constructs a new Limit that reads until the given size + #[inline] pub fn new_byte_size(size: ByteSize) -> Self { size.into() } @@ -151,7 +165,8 @@ pub struct BitSize(pub usize); impl BitSize { /// Convert the size in bytes to a bit size. - const fn bits_from_bytes(byte_size: usize) -> Self { + #[inline] + const fn bits_from_reader(byte_size: usize) -> Self { // TODO: use checked_mul when const_option is enabled // link: https://github.com/rust-lang/rust/issues/67441 Self(byte_size * 8) @@ -164,12 +179,14 @@ impl BitSize { /// /// assert_eq!(BitSize::of::(), BitSize(4 * 8)); /// ``` + #[inline] pub const fn of() -> Self { - Self::bits_from_bytes(core::mem::size_of::()) + Self::bits_from_reader(core::mem::size_of::()) } /// Returns the bit size of the pointed-to value + #[inline] pub fn of_val(val: &T) -> Self { - Self::bits_from_bytes(core::mem::size_of_val(val)) + Self::bits_from_reader(core::mem::size_of_val(val)) } } diff --git a/src/error.rs b/src/error.rs index bfeabf96..061f58dc 100644 --- a/src/error.rs +++ b/src/error.rs @@ -2,7 +2,8 @@ #![cfg(feature = "alloc")] -use alloc::{format, string::String}; +use alloc::format; +use alloc::string::String; /// Number of bits needed to retry parsing #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/src/impls/bool.rs b/src/impls/bool.rs index 478b5769..71e85a16 100644 --- a/src/impls/bool.rs +++ b/src/impls/bool.rs @@ -1,21 +1,22 @@ -use crate::{DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; +use no_std_io::io::Read; #[cfg(feature = "alloc")] use alloc::format; -impl<'a, Ctx> DekuRead<'a, Ctx> for bool +use bitvec::prelude::*; + +use crate::{DekuError, DekuReader, DekuWrite}; + +impl<'a, Ctx> DekuReader<'a, Ctx> for bool where Ctx: Copy, - u8: DekuRead<'a, Ctx>, + u8: DekuReader<'a, Ctx>, { - /// wrapper around u8::read with consideration to context, such as bit size - /// true if the result of the read is `1`, false if `0` and error otherwise 
- fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, inner_ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> { - let (rest, val) = u8::read(input, inner_ctx)?; + ) -> Result { + let val = u8::from_reader_with_ctx(reader, inner_ctx)?; let ret = match val { 0x01 => Ok(true), @@ -23,7 +24,7 @@ where _ => Err(DekuError::Parse(format!("cannot parse bool value: {val}",))), }?; - Ok((rest, ret)) + Ok(ret) } } @@ -42,10 +43,14 @@ where #[cfg(test)] mod tests { - use super::*; use hexlit::hex; + use no_std_io::io::Cursor; use rstest::rstest; + use crate::reader::Reader; + + use super::*; + #[rstest(input, expected, case(&hex!("00"), false), case(&hex!("01"), true), @@ -53,25 +58,20 @@ mod tests { #[should_panic(expected = "Parse(\"cannot parse bool value: 2\")")] case(&hex!("02"), false), )] - fn test_bool(input: &[u8], expected: bool) { - let bit_slice = input.view_bits::(); - let (rest, res_read) = bool::read(bit_slice, ()).unwrap(); + fn test_bool(mut input: &[u8], expected: bool) { + let mut reader = Reader::new(&mut input); + let res_read = bool::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!(expected, res_read); - assert!(rest.is_empty()); - - let mut res_write = bitvec![u8, Msb0;]; - res_read.write(&mut res_write, ()).unwrap(); - assert_eq!(input.to_vec(), res_write.into_vec()); } #[test] fn test_bool_with_context() { let input = &[0b01_000000]; - let bit_slice = input.view_bits::(); - let (rest, res_read) = bool::read(bit_slice, crate::ctx::BitSize(2)).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = bool::from_reader_with_ctx(&mut reader, crate::ctx::BitSize(2)).unwrap(); assert!(res_read); - assert_eq!(6, rest.len()); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); diff --git a/src/impls/boxed.rs b/src/impls/boxed.rs index 9ce5465e..bd733efa 100644 --- a/src/impls/boxed.rs +++ b/src/impls/boxed.rs @@ 
-1,22 +1,24 @@ -use crate::{ctx::Limit, DekuError, DekuRead, DekuWrite}; -use alloc::{boxed::Box, vec::Vec}; +use no_std_io::io::Read; + +use alloc::boxed::Box; +use alloc::vec::Vec; + use bitvec::prelude::*; -impl<'a, T, Ctx> DekuRead<'a, Ctx> for Box +use crate::ctx::Limit; +use crate::{DekuError, DekuReader, DekuWrite}; + +impl<'a, T, Ctx> DekuReader<'a, Ctx> for Box where - T: DekuRead<'a, Ctx>, + T: DekuReader<'a, Ctx>, Ctx: Copy, { - /// Read a T from input and store as Box - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, inner_ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, val) = ::read(input, inner_ctx)?; - Ok((rest, Box::new(val))) + ) -> Result { + let val = ::from_reader_with_ctx(reader, inner_ctx)?; + Ok(Box::new(val)) } } @@ -31,23 +33,19 @@ where } } -impl<'a, T, Ctx, Predicate> DekuRead<'a, (Limit, Ctx)> for Box<[T]> +impl<'a, T, Ctx, Predicate> DekuReader<'a, (Limit, Ctx)> for Box<[T]> where - T: DekuRead<'a, Ctx>, + T: DekuReader<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool, { - /// Read `T`s until the given limit - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, (limit, inner_ctx): (Limit, Ctx), - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { + ) -> Result { // use Vec's implementation and convert to Box<[T]> - let (rest, val) = >::read(input, (limit, inner_ctx))?; - Ok((rest, val.into_boxed_slice())) + let val = >::from_reader_with_ctx(reader, (limit, inner_ctx))?; + Ok(val.into_boxed_slice()) } } @@ -67,23 +65,25 @@ where #[cfg(test)] mod tests { + use no_std_io::io::Cursor; + use rstest::rstest; + use super::*; use crate::ctx::*; use crate::native_endian; - use rstest::rstest; + use crate::reader::Reader; - #[rstest(input, expected, expected_rest, + #[rstest(input, expected, case( &[0xEF, 0xBE], Box::new(native_endian!(0xBEEF_u16)), - bits![u8, Msb0;] ), )] - fn 
test_boxed(input: &[u8], expected: Box, expected_rest: &BitSlice) { - let bit_slice = input.view_bits::(); - let (rest, res_read) = >::read(bit_slice, ()).unwrap(); + fn test_boxed(input: &[u8], expected: Box) { + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = >::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); @@ -91,32 +91,40 @@ mod tests { } // Note: Copied tests from vec.rs impl - #[rstest(input, endian, bit_size, limit, expected, expected_rest, expected_write, - case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC].into_boxed_slice(), bits![u8, Msb0;], vec![0xAA, 0xBB, 0xCC, 0xDD]), - case::normal_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD].into_boxed_slice(), bits![u8, Msb0;], vec![0xAA, 0xBB, 0xCC, 0xDD]), - case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA].into_boxed_slice(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB].into_boxed_slice(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA].into_boxed_slice(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), vec![0xAABB].into_boxed_slice(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), + #[rstest(input, endian, bit_size, limit, expected, expected_rest_bits, 
expected_rest_bytes, expected_write, + case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC].into_boxed_slice(), bits![u8, Msb0;], &[], vec![0xAA, 0xBB, 0xCC, 0xDD]), + case::normal_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD].into_boxed_slice(), bits![u8, Msb0;], &[], vec![0xAA, 0xBB, 0xCC, 0xDD]), + case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA].into_boxed_slice(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB].into_boxed_slice(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA].into_boxed_slice(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), vec![0xAABB].into_boxed_slice(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), )] - fn test_boxed_slice bool>( + fn test_boxed_slice_from_reader_with_ctx bool>( input: &[u8], endian: Endian, bit_size: Option, limit: Limit, expected: Box<[u16]>, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], expected_write: Vec, ) { - let bit_slice = input.view_bits::(); - // Unwrap here because all test cases are `Some`. 
let bit_size = bit_size.unwrap(); - let (rest, res_read) = - >::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = + >::from_reader_with_ctx(&mut reader, (limit, (endian, BitSize(bit_size)))) + .unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); let mut res_write = bitvec![u8, Msb0;]; res_read diff --git a/src/impls/cow.rs b/src/impls/cow.rs index e685aac2..d7a4ff56 100644 --- a/src/impls/cow.rs +++ b/src/impls/cow.rs @@ -1,22 +1,22 @@ -use crate::{DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; use std::borrow::{Borrow, Cow}; -impl<'a, T, Ctx> DekuRead<'a, Ctx> for Cow<'a, T> +use no_std_io::io::Read; + +use bitvec::prelude::*; + +use crate::{DekuError, DekuReader, DekuWrite}; + +impl<'a, T, Ctx> DekuReader<'a, Ctx> for Cow<'a, T> where - T: DekuRead<'a, Ctx> + Clone, + T: DekuReader<'a, Ctx> + Clone, Ctx: Copy, { - /// Read a T from input and store as Cow - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, inner_ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, val) = ::read(input, inner_ctx)?; - Ok((rest, Cow::Owned(val))) + ) -> Result { + let val = ::from_reader_with_ctx(reader, inner_ctx)?; + Ok(Cow::Owned(val)) } } @@ -33,22 +33,23 @@ where #[cfg(test)] mod tests { - use super::*; - use crate::native_endian; + use no_std_io::io::Cursor; use rstest::rstest; - #[rstest(input, expected, expected_rest, + use super::*; + use crate::{native_endian, reader::Reader}; + + #[rstest(input, expected, case( &[0xEF, 0xBE], Cow::Owned(native_endian!(0xBEEF_u16)), - bits![u8, Msb0;] ), )] - fn test_cow(input: &[u8], expected: Cow, expected_rest: 
&BitSlice) { - let bit_slice = input.view_bits::(); - let (rest, res_read) = >::read(bit_slice, ()).unwrap(); + fn test_cow(input: &[u8], expected: Cow) { + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = >::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); diff --git a/src/impls/cstring.rs b/src/impls/cstring.rs index 692394fe..c95e41ff 100644 --- a/src/impls/cstring.rs +++ b/src/impls/cstring.rs @@ -1,7 +1,11 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; +use no_std_io::io::Read; use std::ffi::CString; +use bitvec::prelude::*; + +use crate::{ctx::*, DekuReader}; +use crate::{DekuError, DekuWrite}; + impl DekuWrite for CString where u8: DekuWrite, @@ -12,59 +16,56 @@ where } } -impl<'a, Ctx: Copy> DekuRead<'a, Ctx> for CString +impl<'a, Ctx: Copy> DekuReader<'a, Ctx> for CString where - u8: DekuRead<'a, Ctx>, + u8: DekuReader<'a, Ctx>, { - fn read( - input: &'a BitSlice, - ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, mut bytes) = Vec::read(input, (Limit::from(|b: &u8| *b == 0x00), ctx))?; - - // TODO: use from_vec_with_nul instead once stable + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, + inner_ctx: Ctx, + ) -> Result { + let bytes = + Vec::from_reader_with_ctx(reader, (Limit::from(|b: &u8| *b == 0x00), inner_ctx))?; - // Remove null byte - let nul_byte = bytes.pop(); - if nul_byte != Some(0x00) { - return Err(DekuError::Unexpected("Expected nul byte".to_string())); - } - - let value = CString::new(bytes) + let value = CString::from_vec_with_nul(bytes) .map_err(|e| DekuError::Parse(format!("Failed to convert Vec to CString: {e}")))?; - Ok((rest, value)) + Ok(value) } } #[cfg(test)] mod tests { - use super::*; + use no_std_io::io::Cursor; use rstest::rstest; + 
use crate::reader::Reader; + + use super::*; + #[rstest(input, expected, expected_rest, case( &[b't', b'e', b's', b't', b'\0'], CString::new("test").unwrap(), - bits![u8, Msb0;] + &[], ), case( &[b't', b'e', b's', b't', b'\0', b'a'], CString::new("test").unwrap(), - [b'a'].view_bits::(), + &[b'a'], ), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case(&[b't', b'e', b's', b't'], CString::new("test").unwrap(), bits![u8, Msb0;]), + case(&[b't', b'e', b's', b't'], CString::new("test").unwrap(), &[]), )] - fn test_cstring(input: &[u8], expected: CString, expected_rest: &BitSlice) { - let bit_slice = input.view_bits::(); - let (rest, res_read) = CString::read(bit_slice, ()).unwrap(); + fn test_cstring(input: &[u8], expected: CString, expected_rest: &[u8]) { + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = CString::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest, buf); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); diff --git a/src/impls/hashmap.rs b/src/impls/hashmap.rs index 9b0d94a3..b51ea801 100644 --- a/src/impls/hashmap.rs +++ b/src/impls/hashmap.rs @@ -1,8 +1,12 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; use std::collections::HashMap; use std::hash::{BuildHasher, Hash}; +use bitvec::prelude::*; +use no_std_io::io::Read; + +use crate::ctx::*; +use crate::{DekuError, DekuReader, DekuWrite}; + /// Read `K, V`s into a hashmap until a given predicate returns true /// * `capacity` - an optional capacity to pre-allocate the hashmap with /// * `ctx` - The context required by `K, V`. It will be passed to every `K, V` when constructing. @@ -11,66 +15,63 @@ use std::hash::{BuildHasher, Hash}; /// and a borrow of the latest value to have been read. 
It should return `true` if reading /// should now stop, and `false` otherwise #[allow(clippy::type_complexity)] -fn read_hashmap_with_predicate< - 'a, - K: DekuRead<'a, Ctx> + Eq + Hash, - V: DekuRead<'a, Ctx>, - S: BuildHasher + Default, - Ctx: Copy, - Predicate: FnMut(usize, &(K, V)) -> bool, ->( - input: &'a BitSlice, +fn from_reader_with_ctx_hashmap_with_predicate<'a, K, V, S, Ctx, Predicate, R: Read>( + reader: &mut crate::reader::Reader, capacity: Option, ctx: Ctx, mut predicate: Predicate, -) -> Result<(&'a BitSlice, HashMap), DekuError> { +) -> Result, DekuError> +where + K: DekuReader<'a, Ctx> + Eq + Hash, + V: DekuReader<'a, Ctx>, + S: BuildHasher + Default, + Ctx: Copy, + Predicate: FnMut(usize, &(K, V)) -> bool, +{ let mut res = HashMap::with_capacity_and_hasher(capacity.unwrap_or(0), S::default()); - let mut rest = input; let mut found_predicate = false; + let orig_bits_read = reader.bits_read; while !found_predicate { - let (new_rest, kv) = <(K, V)>::read(rest, ctx)?; - found_predicate = predicate( - unsafe { new_rest.as_bitptr().offset_from(input.as_bitptr()) } as usize, - &kv, - ); - res.insert(kv.0, kv.1); - rest = new_rest; + let val = <(K, V)>::from_reader_with_ctx(reader, ctx)?; + found_predicate = predicate(reader.bits_read - orig_bits_read, &val); + res.insert(val.0, val.1); } - Ok((rest, res)) + Ok(res) } -impl< - 'a, - K: DekuRead<'a, Ctx> + Eq + Hash, - V: DekuRead<'a, Ctx>, - S: BuildHasher + Default, - Ctx: Copy, - Predicate: FnMut(&(K, V)) -> bool, - > DekuRead<'a, (Limit<(K, V), Predicate>, Ctx)> for HashMap +impl<'a, K, V, S, Ctx, Predicate> DekuReader<'a, (Limit<(K, V), Predicate>, Ctx)> + for HashMap +where + K: DekuReader<'a, Ctx> + Eq + Hash, + V: DekuReader<'a, Ctx>, + S: BuildHasher + Default, + Ctx: Copy, + Predicate: FnMut(&(K, V)) -> bool, { - /// Read `K, V`s until the given limit - /// * `limit` - the limiting factor on the amount of `K, V`s to read - /// * `inner_ctx` - The context required by `K, V`. 
It will be passed to every `K, V`s when constructing. + /// Read `T`s until the given limit + /// * `limit` - the limiting factor on the amount of `T`s to read + /// * `inner_ctx` - The context required by `T`. It will be passed to every `T`s when constructing. /// # Examples /// ```rust /// # use deku::ctx::*; - /// # use deku::DekuRead; - /// # use deku::bitvec::BitView; + /// # use deku::DekuReader; /// # use std::collections::HashMap; - /// let input: Vec = vec![100, 1, 2, 3, 4]; - /// let (rest, map) = HashMap::::read(input.view_bits(), (1.into(), Endian::Little)).unwrap(); - /// assert!(rest.is_empty()); + /// # use std::io::Cursor; + /// let mut input = Cursor::new(vec![100, 1, 2, 3, 4]); + /// let mut reader = deku::reader::Reader::new(&mut input); + /// let map = + /// HashMap::::from_reader_with_ctx(&mut reader, (1.into(), Endian::Little)).unwrap(); /// let mut expected = HashMap::::default(); /// expected.insert(100, 0x04030201); /// assert_eq!(expected, map) /// ``` - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, (limit, inner_ctx): (Limit<(K, V), Predicate>, Ctx), - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { @@ -79,20 +80,28 @@ impl< Limit::Count(mut count) => { // Handle the trivial case of reading an empty hashmap if count == 0 { - return Ok((input, HashMap::::default())); + return Ok(HashMap::::default()); } // Otherwise, read until we have read `count` elements - read_hashmap_with_predicate(input, Some(count), inner_ctx, move |_, _| { - count -= 1; - count == 0 - }) + from_reader_with_ctx_hashmap_with_predicate( + reader, + Some(count), + inner_ctx, + move |_, _| { + count -= 1; + count == 0 + }, + ) } // Read until a given predicate returns true - Limit::Until(mut predicate, _) => { - read_hashmap_with_predicate(input, None, inner_ctx, move |_, kv| predicate(kv)) - } + Limit::Until(mut predicate, _) => from_reader_with_ctx_hashmap_with_predicate( + reader, + 
None, + inner_ctx, + move |_, kv| predicate(kv), + ), // Read until a given quantity of bits have been read Limit::BitSize(size) => { @@ -100,48 +109,53 @@ impl< // Handle the trivial case of reading an empty hashmap if bit_size == 0 { - return Ok((input, HashMap::::default())); + return Ok(HashMap::::default()); } - read_hashmap_with_predicate(input, None, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) + from_reader_with_ctx_hashmap_with_predicate( + reader, + None, + inner_ctx, + move |read_bits, _| read_bits == bit_size, + ) } - // Read until a given quantity of bits have been read + // Read until a given quantity of byte bits have been read Limit::ByteSize(size) => { let bit_size = size.0 * 8; // Handle the trivial case of reading an empty hashmap if bit_size == 0 { - return Ok((input, HashMap::::default())); + return Ok(HashMap::::default()); } - read_hashmap_with_predicate(input, None, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) + from_reader_with_ctx_hashmap_with_predicate( + reader, + None, + inner_ctx, + move |read_bits, _| read_bits == bit_size, + ) } } } } -impl< - 'a, - K: DekuRead<'a> + Eq + Hash, - V: DekuRead<'a>, - S: BuildHasher + Default, - Predicate: FnMut(&(K, V)) -> bool, - > DekuRead<'a, Limit<(K, V), Predicate>> for HashMap +impl<'a, K, V, S, Predicate> DekuReader<'a, Limit<(K, V), Predicate>> for HashMap +where + K: DekuReader<'a> + Eq + Hash, + V: DekuReader<'a>, + S: BuildHasher + Default, + Predicate: FnMut(&(K, V)) -> bool, { /// Read `K, V`s until the given limit from input for types which don't require context. 
- fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, limit: Limit<(K, V), Predicate>, - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { - Self::read(input, (limit, ())) + Self::from_reader_with_ctx(reader, (limit, ())) } } @@ -173,10 +187,14 @@ impl, V: DekuWrite, S, Ctx: Copy> DekuWrite for Hash #[cfg(test)] mod tests { - use super::*; + use no_std_io::io::Cursor; use rstest::rstest; use rustc_hash::FxHashMap; + use crate::reader::Reader; + + use super::*; + // Macro to create a deterministic HashMap for tests // This is needed for tests since the default HashMap Hasher // RandomState will Hash the keys different for each run of the test cases @@ -194,45 +212,55 @@ mod tests { }; ); - #[rstest(input, endian, bit_size, limit, expected, expected_rest, - case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 0.into(), FxHashMap::default(), bits![u8, Msb0; 1, 0, 1, 0, 1, 0, 1, 0]), - case::count_1([0x01, 0xAA, 0x02, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0; 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1]), - case::count_2([0x01, 0xAA, 0x02, 0xBB, 0xBB].as_ref(), Endian::Little, Some(8), 2.into(), fxhashmap!{0x01 => 0xAA, 0x02 => 0xBB}, bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::until_null([0x01, 0xAA, 0, 0, 0xBB].as_ref(), Endian::Little, None, (|kv: &(u8, u8)| kv.0 == 0u8 && kv.1 == 0u8).into(), fxhashmap!{0x01 => 0xAA, 0 => 0}, bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::until_bits([0x01, 0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(16).into(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::bits_6([0b0000_0100, 0b1111_0000, 0b1000_0000].as_ref(), Endian::Little, Some(6), 2.into(), fxhashmap!{0x01 => 0x0F, 0x02 => 0}, bits![u8, Msb0;]), + #[rstest(input, endian, bit_size, limit, expected, expected_rest_bits, expected_rest_bytes, + case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 
0.into(), FxHashMap::default(), bits![u8, Msb0;], &[0xaa]), + case::count_1([0x01, 0xAA, 0x02, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0;], &[0x02, 0xbb]), + case::count_2([0x01, 0xAA, 0x02, 0xBB, 0xBB].as_ref(), Endian::Little, Some(8), 2.into(), fxhashmap!{0x01 => 0xAA, 0x02 => 0xBB}, bits![u8, Msb0;], &[0xbb]), + case::until_null([0x01, 0xAA, 0, 0, 0xBB].as_ref(), Endian::Little, None, (|kv: &(u8, u8)| kv.0 == 0u8 && kv.1 == 0u8).into(), fxhashmap!{0x01 => 0xAA, 0 => 0}, bits![u8, Msb0;], &[0xbb]), + case::until_bits([0x01, 0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(16).into(), fxhashmap!{0x01 => 0xAA}, bits![u8, Msb0;], &[0xbb]), + case::bits_6([0b0000_0100, 0b1111_0000, 0b1000_0000].as_ref(), Endian::Little, Some(6), 2.into(), fxhashmap!{0x01 => 0x0F, 0x02 => 0}, bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;]), + case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), FxHashMap::default(), bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), FxHashMap::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &(u8, u8)| false).into(), 
FxHashMap::default(), bits![u8, Msb0;]), + case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &(u8, u8)| false).into(), FxHashMap::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), FxHashMap::default(), bits![u8, Msb0;]), + case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), FxHashMap::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;]), + case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), FxHashMap::default(), bits![u8, Msb0;], &[]), )] - fn test_hashmap_read bool>( + fn test_hashmap_read bool + Copy>( input: &[u8], endian: Endian, bit_size: Option, limit: Limit<(u8, u8), Predicate>, expected: FxHashMap, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = match bit_size { - Some(bit_size) => { - FxHashMap::::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap() + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = match bit_size { + Some(bit_size) => FxHashMap::::from_reader_with_ctx( + &mut reader, + (limit, (endian, BitSize(bit_size))), + ) + .unwrap(), + None => { + FxHashMap::::from_reader_with_ctx(&mut reader, (limit, (endian))).unwrap() } - None => FxHashMap::::read(bit_slice, (limit, (endian))).unwrap(), }; - assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); } 
#[rstest(input, endian, expected, @@ -245,27 +273,35 @@ mod tests { } // Note: These tests also exist in boxed.rs - #[rstest(input, endian, limit, expected, expected_rest, expected_write, - case::normal_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, 2.into(), fxhashmap!{0xBBAA => 0, 0xDDCC => 0}, bits![u8, Msb0;], vec![0xCC, 0xDD, 0, 0xAA, 0xBB, 0]), - case::normal_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, 2.into(), fxhashmap!{0xAABB => 0, 0xCCDD => 0}, bits![u8, Msb0;], vec![0xCC, 0xDD, 0, 0xAA, 0xBB, 0]), - case::predicate_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, (|kv: &(u16, u8)| kv.0 == 0xBBAA && kv.1 == 0).into(), fxhashmap!{0xBBAA => 0}, bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], vec![0xAA, 0xBB, 0]), - case::predicate_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, (|kv: &(u16, u8)| kv.0 == 0xAABB && kv.1 == 0).into(), fxhashmap!{0xAABB => 0}, bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], vec![0xAA, 0xBB, 0]), - case::bytes_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, BitSize(24).into(), fxhashmap!{0xBBAA => 0}, bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], vec![0xAA, 0xBB, 0]), - case::bytes_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, BitSize(24).into(), fxhashmap!{0xAABB => 0}, bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], vec![0xAA, 0xBB, 0]), + #[rstest(input, endian, limit, expected, expected_rest_bits, expected_rest_bytes, expected_write, + case::normal_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, 2.into(), fxhashmap!{0xBBAA => 0, 0xDDCC => 0}, bits![u8, Msb0;], &[], vec![0xCC, 0xDD, 0, 0xAA, 0xBB, 0]), + case::normal_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, 2.into(), fxhashmap!{0xAABB => 0, 0xCCDD => 0}, bits![u8, Msb0;], &[], vec![0xCC, 0xDD, 0, 0xAA, 0xBB, 0]), + 
case::predicate_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, (|kv: &(u16, u8)| kv.0 == 0xBBAA && kv.1 == 0).into(), fxhashmap!{0xBBAA => 0}, bits![u8, Msb0;], &[0xcc, 0xdd, 0], vec![0xAA, 0xBB, 0]), + case::predicate_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, (|kv: &(u16, u8)| kv.0 == 0xAABB && kv.1 == 0).into(), fxhashmap!{0xAABB => 0}, bits![u8, Msb0;], &[0xcc, 0xdd, 0], vec![0xAA, 0xBB, 0]), + case::bytes_le([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Little, BitSize(24).into(), fxhashmap!{0xBBAA => 0}, bits![u8, Msb0;], &[0xcc, 0xdd, 0], vec![0xAA, 0xBB, 0]), + case::bytes_be([0xAA, 0xBB, 0, 0xCC, 0xDD, 0].as_ref(), Endian::Big, BitSize(24).into(), fxhashmap!{0xAABB => 0}, bits![u8, Msb0;], &[0xcc, 0xdd, 0], vec![0xAA, 0xBB, 0]), )] - fn test_hashmap_read_write bool>( + fn test_hashmap_read_write bool + Copy>( input: &[u8], endian: Endian, limit: Limit<(u16, u8), Predicate>, expected: FxHashMap, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], expected_write: Vec, ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = FxHashMap::::read(bit_slice, (limit, endian)).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = + FxHashMap::::from_reader_with_ctx(&mut reader, (limit, endian)).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, endian).unwrap(); diff --git a/src/impls/hashset.rs b/src/impls/hashset.rs index 7492e027..2e1fbd7e 100644 --- a/src/impls/hashset.rs +++ b/src/impls/hashset.rs @@ -1,8 +1,12 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; use std::collections::HashSet; use 
std::hash::{BuildHasher, Hash}; +use bitvec::prelude::*; +use no_std_io::io::Read; + +use crate::ctx::*; +use crate::{DekuError, DekuReader, DekuWrite}; + /// Read `T`s into a hashset until a given predicate returns true /// * `capacity` - an optional capacity to pre-allocate the hashset with /// * `ctx` - The context required by `T`. It will be passed to every `T` when constructing. @@ -11,43 +15,38 @@ use std::hash::{BuildHasher, Hash}; /// and a borrow of the latest value to have been read. It should return `true` if reading /// should now stop, and `false` otherwise #[allow(clippy::type_complexity)] -fn read_hashset_with_predicate< - 'a, - T: DekuRead<'a, Ctx> + Eq + Hash, - S: BuildHasher + Default, - Ctx: Copy, - Predicate: FnMut(usize, &T) -> bool, ->( - input: &'a BitSlice, +fn from_reader_with_ctx_hashset_with_predicate<'a, T, S, Ctx, Predicate, R: Read>( + reader: &mut crate::reader::Reader, capacity: Option, ctx: Ctx, mut predicate: Predicate, -) -> Result<(&'a BitSlice, HashSet), DekuError> { +) -> Result, DekuError> +where + T: DekuReader<'a, Ctx> + Eq + Hash, + S: BuildHasher + Default, + Ctx: Copy, + Predicate: FnMut(usize, &T) -> bool, +{ let mut res = HashSet::with_capacity_and_hasher(capacity.unwrap_or(0), S::default()); - let mut rest = input; let mut found_predicate = false; + let orig_bits_read = reader.bits_read; while !found_predicate { - let (new_rest, val) = ::read(rest, ctx)?; - found_predicate = predicate( - unsafe { new_rest.as_bitptr().offset_from(input.as_bitptr()) } as usize, - &val, - ); + let val = ::from_reader_with_ctx(reader, ctx)?; + found_predicate = predicate(reader.bits_read - orig_bits_read, &val); res.insert(val); - rest = new_rest; } - Ok((rest, res)) + Ok(res) } -impl< - 'a, - T: DekuRead<'a, Ctx> + Eq + Hash, - S: BuildHasher + Default, - Ctx: Copy, - Predicate: FnMut(&T) -> bool, - > DekuRead<'a, (Limit, Ctx)> for HashSet +impl<'a, T, S, Ctx, Predicate> DekuReader<'a, (Limit, Ctx)> for HashSet +where + T: DekuReader<'a, 
Ctx> + Eq + Hash, + S: BuildHasher + Default, + Ctx: Copy, + Predicate: FnMut(&T) -> bool, { /// Read `T`s until the given limit /// * `limit` - the limiting factor on the amount of `T`s to read @@ -55,19 +54,19 @@ impl< /// # Examples /// ```rust /// # use deku::ctx::*; - /// # use deku::DekuRead; - /// # use deku::bitvec::BitView; + /// # use deku::DekuReader; /// # use std::collections::HashSet; - /// let input = vec![1u8, 2, 3, 4]; + /// # use std::io::Cursor; + /// let mut input = Cursor::new(vec![1u8, 2, 3, 4]); /// let expected: HashSet = vec![0x04030201].into_iter().collect(); - /// let (rest, set) = HashSet::::read(input.view_bits(), (1.into(), Endian::Little)).unwrap(); - /// assert!(rest.is_empty()); + /// let mut reader = deku::reader::Reader::new(&mut input); + /// let set = HashSet::::from_reader_with_ctx(&mut reader, (1.into(), Endian::Little)).unwrap(); /// assert_eq!(expected, set) /// ``` - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, (limit, inner_ctx): (Limit, Ctx), - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { @@ -76,22 +75,28 @@ impl< Limit::Count(mut count) => { // Handle the trivial case of reading an empty hashset if count == 0 { - return Ok((input, HashSet::::default())); + return Ok(HashSet::::default()); } // Otherwise, read until we have read `count` elements - read_hashset_with_predicate(input, Some(count), inner_ctx, move |_, _| { - count -= 1; - count == 0 - }) + from_reader_with_ctx_hashset_with_predicate( + reader, + Some(count), + inner_ctx, + move |_, _| { + count -= 1; + count == 0 + }, + ) } // Read until a given predicate returns true - Limit::Until(mut predicate, _) => { - read_hashset_with_predicate(input, None, inner_ctx, move |_, value| { - predicate(value) - }) - } + Limit::Until(mut predicate, _) => from_reader_with_ctx_hashset_with_predicate( + reader, + None, + inner_ctx, + move |_, value| predicate(value), + ), // Read until a 
given quantity of bits have been read Limit::BitSize(size) => { @@ -99,12 +104,15 @@ impl< // Handle the trivial case of reading an empty hashset if bit_size == 0 { - return Ok((input, HashSet::::default())); + return Ok(HashSet::::default()); } - read_hashset_with_predicate(input, None, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) + from_reader_with_ctx_hashset_with_predicate( + reader, + None, + inner_ctx, + move |read_bits, _| read_bits == bit_size, + ) } // Read until a given quantity of bits have been read @@ -113,29 +121,32 @@ impl< // Handle the trivial case of reading an empty hashset if bit_size == 0 { - return Ok((input, HashSet::::default())); + return Ok(HashSet::::default()); } - read_hashset_with_predicate(input, None, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) + from_reader_with_ctx_hashset_with_predicate( + reader, + None, + inner_ctx, + move |read_bits, _| read_bits == bit_size, + ) } } } } -impl<'a, T: DekuRead<'a> + Eq + Hash, S: BuildHasher + Default, Predicate: FnMut(&T) -> bool> - DekuRead<'a, Limit> for HashSet +impl<'a, T: DekuReader<'a> + Eq + Hash, S: BuildHasher + Default, Predicate: FnMut(&T) -> bool> + DekuReader<'a, Limit> for HashSet { /// Read `T`s until the given limit from input for types which don't require context. 
- fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, limit: Limit, - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { - Self::read(input, (limit, ())) + Self::from_reader_with_ctx(reader, (limit, ())) } } @@ -165,49 +176,61 @@ impl, S, Ctx: Copy> DekuWrite for HashSet { #[cfg(test)] mod tests { - use super::*; + use no_std_io::io::Cursor; use rstest::rstest; use rustc_hash::FxHashSet; - #[rstest(input, endian, bit_size, limit, expected, expected_rest, - case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 0.into(), FxHashSet::default(), bits![u8, Msb0; 1, 0, 1, 0, 1, 0, 1, 0]), - case::count_1([0xAA, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), vec![0xAA].into_iter().collect(), bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB].into_iter().collect(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0]), - case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0].into_iter().collect(), bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA].into_iter().collect(), bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110].into_iter().collect(), bits![u8, Msb0; 1, 0, 0, 1]), + use crate::reader::Reader; + + use super::*; + + #[rstest(input, endian, bit_size, limit, expected, expected_rest_bits, expected_rest_bytes, + case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 0.into(), FxHashSet::default(), bits![u8, Msb0;], &[0xaa]), + case::count_1([0xAA, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), vec![0xAA].into_iter().collect(), bits![u8, Msb0;], &[0xbb]), + case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB].into_iter().collect(), 
bits![u8, Msb0;], &[0xcc]), + case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0].into_iter().collect(), bits![u8, Msb0;], &[0xbb]), + case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA].into_iter().collect(), bits![u8, Msb0;], &[0xbb]), + case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110].into_iter().collect(), bits![u8, Msb0; 1, 0, 0, 1], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;]), + case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), FxHashSet::default(), bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), FxHashSet::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &u8| false).into(), FxHashSet::default(), bits![u8, Msb0;]), + case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &u8| false).into(), FxHashSet::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), FxHashSet::default(), 
bits![u8, Msb0;]), + case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), FxHashSet::default(), bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;]), + case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), FxHashSet::default(), bits![u8, Msb0;], &[]), )] - fn test_hashset_read bool>( + fn test_hashset_read bool + Copy>( input: &[u8], endian: Endian, bit_size: Option, limit: Limit, expected: FxHashSet, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = match bit_size { - Some(bit_size) => { - FxHashSet::::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap() - } - None => FxHashSet::::read(bit_slice, (limit, (endian))).unwrap(), + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = match bit_size { + Some(bit_size) => FxHashSet::::from_reader_with_ctx( + &mut reader, + (limit, (endian, BitSize(bit_size))), + ) + .unwrap(), + None => FxHashSet::::from_reader_with_ctx(&mut reader, (limit, (endian))).unwrap(), }; - assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); } #[rstest(input, endian, expected, @@ -220,32 +243,42 @@ mod tests { } // Note: These tests also exist in boxed.rs - #[rstest(input, endian, bit_size, limit, expected, expected_rest, expected_write, - case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC].into_iter().collect(), bits![u8, Msb0;], vec![0xCC, 0xDD, 0xAA, 0xBB]), - 
case::normal_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD].into_iter().collect(), bits![u8, Msb0;], vec![0xCC, 0xDD, 0xAA, 0xBB]), - case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA].into_iter().collect(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB].into_iter().collect(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA].into_iter().collect(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), vec![0xAABB].into_iter().collect(), bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), + #[rstest(input, endian, bit_size, limit, expected, expected_rest_bits, expected_rest_bytes, expected_write, + case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC].into_iter().collect(), bits![u8, Msb0;], &[], vec![0xCC, 0xDD, 0xAA, 0xBB]), + case::normal_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD].into_iter().collect(), bits![u8, Msb0;], &[], vec![0xCC, 0xDD, 0xAA, 0xBB]), + case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA].into_iter().collect(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB].into_iter().collect(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_le([0xAA, 0xBB, 0xCC, 
0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA].into_iter().collect(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), vec![0xAABB].into_iter().collect(), bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), )] - fn test_hashset_read_write bool>( + fn test_hashset_read_write bool + Copy>( input: &[u8], endian: Endian, bit_size: Option, limit: Limit, expected: FxHashSet, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], expected_write: Vec, ) { - let bit_slice = input.view_bits::(); - // Unwrap here because all test cases are `Some`. let bit_size = bit_size.unwrap(); - let (rest, res_read) = - FxHashSet::::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = FxHashSet::::from_reader_with_ctx( + &mut reader, + (limit, (endian, BitSize(bit_size))), + ) + .unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + cursor.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); let mut res_write = bitvec![u8, Msb0;]; res_read diff --git a/src/impls/ipaddr.rs b/src/impls/ipaddr.rs index 304ac8fd..9ad5bf2c 100644 --- a/src/impls/ipaddr.rs +++ b/src/impls/ipaddr.rs @@ -1,20 +1,21 @@ -use crate::{DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; -impl<'a, Ctx> DekuRead<'a, Ctx> for Ipv4Addr +use no_std_io::io::Read; + +use bitvec::prelude::*; + +use crate::{DekuError, DekuReader, DekuWrite}; + +impl<'a, Ctx> DekuReader<'a, Ctx> for Ipv4Addr where - u32: DekuRead<'a, Ctx>, + u32: DekuReader<'a, Ctx>, { - fn read( - input: &'a BitSlice, - ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - 
Self: Sized, - { - let (rest, ip) = u32::read(input, ctx)?; - Ok((rest, ip.into())) + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, + inner_ctx: Ctx, + ) -> Result { + let ip = u32::from_reader_with_ctx(reader, inner_ctx)?; + Ok(ip.into()) } } @@ -28,19 +29,16 @@ where } } -impl<'a, Ctx> DekuRead<'a, Ctx> for Ipv6Addr +impl<'a, Ctx> DekuReader<'a, Ctx> for Ipv6Addr where - u128: DekuRead<'a, Ctx>, + u128: DekuReader<'a, Ctx>, { - fn read( - input: &'a BitSlice, - ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, ip) = u128::read(input, ctx)?; - Ok((rest, ip.into())) + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, + inner_ctx: Ctx, + ) -> Result { + let ip = u128::from_reader_with_ctx(reader, inner_ctx)?; + Ok(ip.into()) } } @@ -69,46 +67,36 @@ where #[cfg(test)] mod tests { - use super::*; - use crate::ctx::Endian; + use no_std_io::io::Cursor; use rstest::rstest; - #[rstest(input, endian, expected, expected_rest, - case::normal_le([237, 160, 254, 145].as_ref(), Endian::Little, Ipv4Addr::new(145, 254, 160, 237), bits![u8, Msb0;]), - case::normal_be([145, 254, 160, 237].as_ref(), Endian::Big, Ipv4Addr::new(145, 254, 160, 237), bits![u8, Msb0;]), + use super::*; + use crate::{ctx::Endian, reader::Reader}; + + #[rstest(input, endian, expected, + case::normal_le([237, 160, 254, 145].as_ref(), Endian::Little, Ipv4Addr::new(145, 254, 160, 237)), + case::normal_be([145, 254, 160, 237].as_ref(), Endian::Big, Ipv4Addr::new(145, 254, 160, 237)), )] - fn test_ipv4( - input: &[u8], - endian: Endian, - expected: Ipv4Addr, - expected_rest: &BitSlice, - ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = Ipv4Addr::read(bit_slice, endian).unwrap(); + fn test_ipv4(input: &[u8], endian: Endian, expected: Ipv4Addr) { + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = Ipv4Addr::from_reader_with_ctx(&mut reader, endian).unwrap(); 
assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, endian).unwrap(); assert_eq!(input.to_vec(), res_write.into_vec()); } - #[rstest(input, endian, expected, expected_rest, - case::normal_le([0xFF, 0x02, 0x0A, 0xC0, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].as_ref(), Endian::Little, Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x02ff), bits![u8, Msb0;]), - case::normal_be([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xC0, 0x0A, 0x02, 0xFF].as_ref(), Endian::Big, Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x02ff), bits![u8, Msb0;]), + #[rstest(input, endian, expected, + case::normal_le([0xFF, 0x02, 0x0A, 0xC0, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].as_ref(), Endian::Little, Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x02ff)), + case::normal_be([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xC0, 0x0A, 0x02, 0xFF].as_ref(), Endian::Big, Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc00a, 0x02ff)), )] - fn test_ipv6( - input: &[u8], - endian: Endian, - expected: Ipv6Addr, - expected_rest: &BitSlice, - ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = Ipv6Addr::read(bit_slice, endian).unwrap(); + fn test_ipv6(input: &[u8], endian: Endian, expected: Ipv6Addr) { + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = Ipv6Addr::from_reader_with_ctx(&mut reader, endian).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, endian).unwrap(); @@ -127,7 +115,7 @@ mod tests { ip_addr.write(&mut ret_write, Endian::Little).unwrap(); assert_eq!( vec![ - 0xFF, 0x02, 0x0A, 0xC0, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xff, 0x02, 0x0a, 0xc0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00 ], ret_write.into_vec() diff --git a/src/impls/nonzero.rs b/src/impls/nonzero.rs index 33a5c2bf..d85b245b 100644 --- a/src/impls/nonzero.rs +++ b/src/impls/nonzero.rs @@ -1,26 +1,26 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; -use core::num::*; - #[cfg(feature = "alloc")] use alloc::format; +use core::num::*; +use no_std_io::io::Read; + +use bitvec::prelude::*; + +use crate::ctx::*; +use crate::{DekuError, DekuReader, DekuWrite}; macro_rules! ImplDekuTraitsCtx { ($typ:ty, $readtype:ty, $ctx_arg:tt, $ctx_type:tt) => { - impl DekuRead<'_, $ctx_type> for $typ { - fn read( - input: &BitSlice, + impl DekuReader<'_, $ctx_type> for $typ { + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, $ctx_arg: $ctx_type, - ) -> Result<(&BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, value) = <$readtype>::read(input, $ctx_arg)?; + ) -> Result { + let value = <$readtype>::from_reader_with_ctx(reader, $ctx_arg)?; let value = <$typ>::new(value); match value { None => Err(DekuError::Parse(format!("NonZero assertion"))), - Some(v) => Ok((rest, v)), + Some(v) => Ok(v), } } } @@ -62,10 +62,13 @@ ImplDekuTraits!(NonZeroIsize, isize); #[cfg(test)] mod tests { - use super::*; use hexlit::hex; use rstest::rstest; + use crate::reader::Reader; + + use super::*; + #[rstest(input, expected, case(&hex!("FF"), NonZeroU8::new(0xFF).unwrap()), @@ -73,10 +76,11 @@ mod tests { case(&hex!("00"), NonZeroU8::new(0xFF).unwrap()), )] fn test_non_zero(input: &[u8], expected: NonZeroU8) { - let bit_slice = input.view_bits::(); - let (rest, res_read) = NonZeroU8::read(bit_slice, ()).unwrap(); + let mut bit_slice = input.view_bits::(); + + let mut reader = Reader::new(&mut bit_slice); + let res_read = NonZeroU8::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!(expected, res_read); - assert!(rest.is_empty()); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); diff --git a/src/impls/option.rs 
b/src/impls/option.rs index 24096071..3eabefac 100644 --- a/src/impls/option.rs +++ b/src/impls/option.rs @@ -1,28 +1,15 @@ -use crate::{DekuError, DekuRead, DekuWrite}; use bitvec::prelude::*; +use no_std_io::io::Read; -impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy> DekuRead<'a, Ctx> for Option { - /// Read a T from input and store as Some(T) - /// * `inner_ctx` - The context required by `T`. It will be passed to every `T`s when constructing. - /// # Examples - /// ```rust - /// # use deku::ctx::*; - /// # use deku::DekuRead; - /// # use deku::bitvec::BitView; - /// let input = vec![1u8, 2, 3, 4]; - /// let (rest, v) = Option::::read(input.view_bits(), Endian::Little).unwrap(); - /// assert!(rest.is_empty()); - /// assert_eq!(v, Some(0x04030201)) - /// ``` - fn read( - input: &'a BitSlice, +use crate::{DekuError, DekuReader, DekuWrite}; + +impl<'a, T: DekuReader<'a, Ctx>, Ctx: Copy> DekuReader<'a, Ctx> for Option { + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, inner_ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let (rest, val) = ::read(input, inner_ctx)?; - Ok((rest, Some(val))) + ) -> Result { + let val = ::from_reader_with_ctx(reader, inner_ctx)?; + Ok(Some(val)) } } @@ -42,3 +29,21 @@ impl, Ctx: Copy> DekuWrite for Option { self.as_ref().map_or(Ok(()), |v| v.write(output, inner_ctx)) } } + +#[cfg(test)] +mod tests { + use super::*; + use no_std_io::io::Cursor; + + use crate::reader::Reader; + + #[test] + fn test_option() { + use crate::ctx::*; + let input = &[1u8, 2, 3, 4]; + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let v = Option::::from_reader_with_ctx(&mut reader, Endian::Little).unwrap(); + assert_eq!(v, Some(0x04030201)) + } +} diff --git a/src/impls/primitive.rs b/src/impls/primitive.rs index f77a1ea3..9f44fac2 100644 --- a/src/impls/primitive.rs +++ b/src/impls/primitive.rs @@ -1,89 +1,87 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use 
bitvec::prelude::*; -use core::convert::TryInto; - #[cfg(feature = "alloc")] use alloc::format; +#[cfg(feature = "alloc")] +use alloc::string::ToString; +use core::convert::TryInto; + +use bitvec::prelude::*; +use no_std_io::io::Read; + +use crate::ctx::*; +use crate::reader::{Reader, ReaderRet}; +use crate::{DekuError, DekuReader, DekuWrite}; + +/// "Read" trait: read bits and construct type +trait DekuRead<'a, Ctx = ()> { + /// Read bits and construct type + /// * **input** - Input as bits + /// * **ctx** - A context required by context-sensitive reading. A unit type `()` means no context + /// needed. + /// + /// Returns the amount of bits read after parsing in addition to Self. + /// + /// NOTE: since this is only used internally by primitive types, we don't need to verify the + /// size of BitSize or ByteSize to check if they fit in the requested container size + /// (size_of::()). + fn read( + input: &'a crate::bitvec::BitSlice, + ctx: Ctx, + ) -> Result<(usize, Self), DekuError> + where + Self: Sized; +} // specialize u8 for ByteSize impl DekuRead<'_, (Endian, ByteSize)> for u8 { + #[inline] fn read( input: &BitSlice, - (_, size): (Endian, ByteSize), - ) -> Result<(&BitSlice, Self), DekuError> { + (_, _): (Endian, ByteSize), + ) -> Result<(usize, Self), DekuError> { const MAX_TYPE_BITS: usize = BitSize::of::().0; - let bit_size: usize = size.0 * 8; - - // TODO - // if they never give [bits] or [bytes] we don't need to check the size - if bit_size > MAX_TYPE_BITS { - return Err(DekuError::Parse(format!( - "too much data: container of {MAX_TYPE_BITS} bits cannot hold {bit_size} bits", - ))); - } - if input.len() < bit_size { - return Err(DekuError::Incomplete(crate::error::NeedSize::new(bit_size))); - } - - let (bit_slice, rest) = input.split_at(bit_size); - let pad = 8 * ((bit_slice.len() + 7) / 8) - bit_slice.len(); - - let value = if pad == 0 - && bit_slice.len() == MAX_TYPE_BITS - && bit_slice.domain().region().unwrap().1.len() * 8 == MAX_TYPE_BITS - { - 
// if everything is aligned, just read the value - bit_slice.load::() - } else { - let mut bits: BitVec = BitVec::with_capacity(bit_slice.len() + pad); - - // Copy bits to new BitVec - bits.extend_from_bitslice(bit_slice); - - // Force align - //i.e. [1110, 10010110] -> [11101001, 0110] - bits.force_align(); - - let bytes: &[u8] = bits.as_raw_slice(); + // PANIC: We already check that input.len() < bit_size above, so no panic will happen + let value = input[..MAX_TYPE_BITS].load::(); + Ok((MAX_TYPE_BITS, value)) + } +} - // cannot use from_X_bytes as we don't have enough bytes for $typ - // read manually - let mut res: u8 = 0; - for b in bytes.iter().rev() { - res |= *b; +impl DekuReader<'_, (Endian, ByteSize)> for u8 { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, + (endian, size): (Endian, ByteSize), + ) -> Result { + let mut buf = [0; core::mem::size_of::()]; + let ret = reader.read_bytes(size.0, &mut buf)?; + let a = match ret { + ReaderRet::Bits(bits) => { + let Some(bits) = bits else { + return Err(DekuError::Parse("no bits read from reader".to_string())); + }; + let a = ::read(&bits, (endian, size))?; + a.1 } - - res + ReaderRet::Bytes => ::from_be_bytes(buf), }; - - Ok((rest, value)) + Ok(a) } } macro_rules! 
ImplDekuReadBits { ($typ:ty, $inner:ty) => { impl DekuRead<'_, (Endian, BitSize)> for $typ { + #[inline] fn read( input: &BitSlice, (endian, size): (Endian, BitSize), - ) -> Result<(&BitSlice, Self), DekuError> { + ) -> Result<(usize, Self), DekuError> { const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; let bit_size: usize = size.0; let input_is_le = endian.is_le(); - if bit_size > MAX_TYPE_BITS { - return Err(DekuError::Parse(format!( - "too much data: container of {MAX_TYPE_BITS} bits cannot hold {bit_size} bits", - ))); - } - - if input.len() < bit_size { - return Err(DekuError::Incomplete(crate::error::NeedSize::new(bit_size))); - } - - let (bit_slice, rest) = input.split_at(bit_size); + let bit_slice = &input[..bit_size]; let pad = 8 * ((bit_slice.len() + 7) / 8) - bit_slice.len(); @@ -98,53 +96,75 @@ macro_rules! ImplDekuReadBits { } else { <$typ>::from_be_bytes(bytes.try_into()?) }; - return Ok((rest, value)); + return Ok((bit_size, value)); } } - // Create a new BitVec from the slice and pad un-aligned chunks - // i.e. [10010110, 1110] -> [10010110, 00001110] - let bits: BitVec = { - let mut bits = BitVec::with_capacity(bit_slice.len() + pad); + // Create a new BitVec from the slice and pad un-aligned chunks + // i.e. [10010110, 1110] -> [10010110, 00001110] + let bits: BitVec = { + let mut bits = BitVec::with_capacity(bit_slice.len() + pad); - // Copy bits to new BitVec - bits.extend_from_bitslice(bit_slice); + // Copy bits to new BitVec + bits.extend_from_bitslice(&bit_slice); - // Force align - //i.e. [1110, 10010110] -> [11101001, 0110] - bits.force_align(); + // Force align + //i.e. 
[1110, 10010110] -> [11101001, 0110] + bits.force_align(); - // Some padding to next byte - let index = if input_is_le { - bits.len() - (8 - pad) + // Some padding to next byte + let index = if input_is_le { + bits.len() - (8 - pad) + } else { + 0 + }; + for _ in 0..pad { + bits.insert(index, false); + } + + // Pad up-to size of type + for _ in 0..(MAX_TYPE_BITS - bits.len()) { + if input_is_le { + bits.push(false); } else { - 0 - }; - for _ in 0..pad { - bits.insert(index, false); + bits.insert(0, false); } + } - // Pad up-to size of type - for _ in 0..(MAX_TYPE_BITS - bits.len()) { - if input_is_le { - bits.push(false); - } else { - bits.insert(0, false); - } - } + bits + }; - bits - }; + let bytes: &[u8] = bits.domain().region().unwrap().1; - let bytes: &[u8] = bits.domain().region().unwrap().1; + // Read value + let value = if input_is_le { + <$typ>::from_le_bytes(bytes.try_into()?) + } else { + <$typ>::from_be_bytes(bytes.try_into()?) + }; + Ok((bit_size, value)) + } + } - // Read value - let value = if input_is_le { - <$typ>::from_le_bytes(bytes.try_into()?) - } else { - <$typ>::from_be_bytes(bytes.try_into()?) - }; - Ok((rest, value)) + impl DekuReader<'_, (Endian, BitSize)> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, + (endian, size): (Endian, BitSize), + ) -> Result<$typ, DekuError> { + const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; + if size.0 > MAX_TYPE_BITS { + return Err(DekuError::Parse(format!( + "too much data: container of {MAX_TYPE_BITS} bits cannot hold {} bits", + size.0 + ))); + } + let bits = reader.read_bits(size.0)?; + let Some(bits) = bits else { + return Err(DekuError::Parse(format!("no bits read from reader",))); + }; + let a = <$typ>::read(&bits, (endian, size))?; + Ok(a.1) } } }; @@ -153,69 +173,65 @@ macro_rules! ImplDekuReadBits { macro_rules! 
ImplDekuReadBytes { ($typ:ty, $inner:ty) => { impl DekuRead<'_, (Endian, ByteSize)> for $typ { + #[inline] fn read( input: &BitSlice, (endian, size): (Endian, ByteSize), - ) -> Result<(&BitSlice, Self), DekuError> { - const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; + ) -> Result<(usize, Self), DekuError> { let bit_size: usize = size.0 * 8; let input_is_le = endian.is_le(); - if bit_size > MAX_TYPE_BITS { - return Err(DekuError::Parse(format!( - "too much data: container of {MAX_TYPE_BITS} bits cannot hold {bit_size} bits", - ))); - } - - if input.len() < bit_size { - return Err(DekuError::Incomplete(crate::error::NeedSize::new(bit_size))); - } - - let (bit_slice, rest) = input.split_at(bit_size); - - let pad = 8 * ((bit_slice.len() + 7) / 8) - bit_slice.len(); + let bit_slice = &input[..bit_size]; let bytes = bit_slice.domain().region().unwrap().1; - let value = if pad == 0 - && bit_slice.len() == MAX_TYPE_BITS - && bytes.len() * 8 == MAX_TYPE_BITS - { - // if everything is aligned, just read the value - if input_is_le { - <$typ>::from_le_bytes(bytes.try_into()?) - } else { - <$typ>::from_be_bytes(bytes.try_into()?) - } + let value = if input_is_le { + <$typ>::from_le_bytes(bytes.try_into()?) } else { - let mut bits: BitVec = BitVec::with_capacity(bit_slice.len() + pad); - - // Copy bits to new BitVec - bits.extend_from_bitslice(bit_slice); + <$typ>::from_be_bytes(bytes.try_into()?) + }; - // Force align - //i.e. 
[1110, 10010110] -> [11101001, 0110] - bits.force_align(); + Ok((bit_size, value)) + } + } - // cannot use from_X_bytes as we don't have enough bytes for $typ - // read manually - let mut res: $inner = 0; - if input_is_le { - for b in bytes.iter().rev() { - res <<= 8 as $inner; - res |= *b as $inner; - } - } else { - for b in bytes.iter() { - res <<= 8 as $inner; - res |= *b as $inner; + impl DekuReader<'_, (Endian, ByteSize)> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, + (endian, size): (Endian, ByteSize), + ) -> Result<$typ, DekuError> { + const MAX_TYPE_BYTES: usize = core::mem::size_of::<$typ>(); + if size.0 > MAX_TYPE_BYTES { + return Err(DekuError::Parse(format!( + "too much data: container of {MAX_TYPE_BYTES} bytes cannot hold {} bytes", + size.0 + ))); + } + let mut buf = [0; core::mem::size_of::<$typ>()]; + let ret = reader.read_bytes(size.0, &mut buf)?; + let a = match ret { + ReaderRet::Bits(Some(bits)) => { + let a = <$typ>::read(&bits, (endian, size))?; + a.1 + } + ReaderRet::Bits(None) => { + return Err(DekuError::Parse(format!("no bits read from reader"))); + } + ReaderRet::Bytes => { + if endian.is_le() { + <$typ>::from_le_bytes(buf.try_into().unwrap()) + } else { + if size.0 != core::mem::size_of::<$typ>() { + let padding = core::mem::size_of::<$typ>() - size.0; + buf.copy_within(0..size.0, padding); + buf[..padding].fill(0x00); + } + <$typ>::from_be_bytes(buf.try_into().unwrap()) } - }; - - res as $typ + } }; - - Ok((rest, value)) + Ok(a) } } }; @@ -224,92 +240,151 @@ macro_rules! ImplDekuReadBytes { macro_rules! 
ImplDekuReadSignExtend { ($typ:ty, $inner:ty) => { impl DekuRead<'_, (Endian, ByteSize)> for $typ { + #[inline] fn read( input: &BitSlice, (endian, size): (Endian, ByteSize), - ) -> Result<(&BitSlice, Self), DekuError> { - let (rest, value) = + ) -> Result<(usize, Self), DekuError> { + let (amt_read, value) = <$inner as DekuRead<'_, (Endian, ByteSize)>>::read(input, (endian, size))?; const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; let bit_size = size.0 * 8; let shift = MAX_TYPE_BITS - bit_size; let value = (value as $typ) << shift >> shift; - Ok((rest, value)) + Ok((amt_read, value)) + } + } + + impl DekuReader<'_, (Endian, ByteSize)> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, + (endian, size): (Endian, ByteSize), + ) -> Result<$typ, DekuError> { + let mut buf = [0; core::mem::size_of::<$typ>()]; + let ret = reader.read_bytes(size.0, &mut buf)?; + let a = match ret { + ReaderRet::Bits(bits) => { + let Some(bits) = bits else { + return Err(DekuError::Parse("no bits read from reader".to_string())); + }; + let a = <$typ>::read(&bits, (endian, size))?; + a.1 + } + ReaderRet::Bytes => { + if endian.is_le() { + <$typ>::from_le_bytes(buf.try_into()?) + } else { + <$typ>::from_be_bytes(buf.try_into()?) 
+ } + } + }; + + const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; + let bit_size = size.0 * 8; + let shift = MAX_TYPE_BITS - bit_size; + let value = (a as $typ) << shift >> shift; + Ok(value) } } + impl DekuRead<'_, (Endian, BitSize)> for $typ { + #[inline] fn read( input: &BitSlice, (endian, size): (Endian, BitSize), - ) -> Result<(&BitSlice, Self), DekuError> { - let (rest, value) = + ) -> Result<(usize, Self), DekuError> { + let (amt_read, value) = <$inner as DekuRead<'_, (Endian, BitSize)>>::read(input, (endian, size))?; const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; let bit_size = size.0; let shift = MAX_TYPE_BITS - bit_size; let value = (value as $typ) << shift >> shift; - Ok((rest, value)) + Ok((amt_read, value)) + } + } + + impl DekuReader<'_, (Endian, BitSize)> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, + (endian, size): (Endian, BitSize), + ) -> Result<$typ, DekuError> { + const MAX_TYPE_BITS: usize = BitSize::of::<$typ>().0; + if size.0 > MAX_TYPE_BITS { + return Err(DekuError::Parse(format!( + "too much data: container of {MAX_TYPE_BITS} bits cannot hold {} bits", + size.0 + ))); + } + let bits = reader.read_bits(size.0)?; + let Some(bits) = bits else { + return Err(DekuError::Parse(format!("no bits read from reader",))); + }; + let a = <$typ>::read(&bits, (endian, size))?; + Ok(a.1) } } }; } +// TODO: these forward types should forward on a ContainerCanHoldSize or something if ByteSize or +// BitSize wasn't defined macro_rules! 
ForwardDekuRead { ($typ:ty) => { // Only have `endian`, set `bit_size` to `Size::of::()` - impl DekuRead<'_, Endian> for $typ { - fn read( - input: &BitSlice, + impl DekuReader<'_, Endian> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, endian: Endian, - ) -> Result<(&BitSlice, Self), DekuError> { - let bit_size = BitSize::of::<$typ>(); + ) -> Result<$typ, DekuError> { + let byte_size = core::mem::size_of::<$typ>(); - // Since we don't have a #[bits] or [bytes], check if we can use bytes for perf - if (bit_size.0 % 8) == 0 { - <$typ>::read(input, (endian, ByteSize(bit_size.0 / 8))) - } else { - <$typ>::read(input, (endian, bit_size)) - } + <$typ>::from_reader_with_ctx(reader, (endian, ByteSize(byte_size))) } } - // Only have `bit_size`, set `endian` to `Endian::default`. - impl DekuRead<'_, ByteSize> for $typ { - fn read( - input: &BitSlice, + // Only have `byte_size`, set `endian` to `Endian::default`. + impl DekuReader<'_, ByteSize> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, byte_size: ByteSize, - ) -> Result<(&BitSlice, Self), DekuError> { + ) -> Result<$typ, DekuError> { let endian = Endian::default(); - <$typ>::read(input, (endian, byte_size)) + let a = <$typ>::from_reader_with_ctx(reader, (endian, byte_size))?; + Ok(a) } } - // Only have `bit_size`, set `endian` to `Endian::default`. - impl DekuRead<'_, BitSize> for $typ { - fn read( - input: &BitSlice, + //// Only have `bit_size`, set `endian` to `Endian::default`. 
+ impl DekuReader<'_, BitSize> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, bit_size: BitSize, - ) -> Result<(&BitSlice, Self), DekuError> { + ) -> Result<$typ, DekuError> { let endian = Endian::default(); - // check if we can use ByteSize for performance if (bit_size.0 % 8) == 0 { - <$typ>::read(input, (endian, ByteSize(bit_size.0 / 8))) + <$typ>::from_reader_with_ctx(reader, (endian, ByteSize(bit_size.0 / 8))) } else { - <$typ>::read(input, (endian, bit_size)) + <$typ>::from_reader_with_ctx(reader, (endian, bit_size)) } } } - impl DekuRead<'_> for $typ { - fn read( - input: &BitSlice, + impl DekuReader<'_> for $typ { + #[inline] + fn from_reader_with_ctx( + reader: &mut Reader, _: (), - ) -> Result<(&BitSlice, Self), DekuError> { - <$typ>::read(input, Endian::default()) + ) -> Result<$typ, DekuError> { + <$typ>::from_reader_with_ctx(reader, Endian::default()) } } }; @@ -318,6 +393,7 @@ macro_rules! ForwardDekuRead { macro_rules! ImplDekuWrite { ($typ:ty) => { impl DekuWrite<(Endian, BitSize)> for $typ { + #[inline] fn write( &self, output: &mut BitVec, @@ -363,6 +439,7 @@ macro_rules! ImplDekuWrite { } impl DekuWrite<(Endian, ByteSize)> for $typ { + #[inline] fn write( &self, output: &mut BitVec, @@ -409,6 +486,7 @@ macro_rules! ImplDekuWrite { // Only have `endian`, return all input impl DekuWrite for $typ { + #[inline] fn write( &self, output: &mut BitVec, @@ -429,6 +507,7 @@ macro_rules! ForwardDekuWrite { ($typ:ty) => { // Only have `bit_size`, set `endian` to `Endian::default`. impl DekuWrite for $typ { + #[inline] fn write( &self, output: &mut BitVec, @@ -440,6 +519,7 @@ macro_rules! ForwardDekuWrite { // Only have `bit_size`, set `endian` to `Endian::default`. impl DekuWrite for $typ { + #[inline] fn write( &self, output: &mut BitVec, @@ -450,6 +530,7 @@ macro_rules! 
ForwardDekuWrite { } impl DekuWrite for $typ { + #[inline] fn write(&self, output: &mut BitVec, _: ()) -> Result<(), DekuError> { <$typ>::write(self, output, Endian::default()) } @@ -518,140 +599,196 @@ ImplDekuTraitsBytes!(f64, u64); #[cfg(test)] mod tests { - use super::*; - use crate::native_endian; use rstest::rstest; + use super::*; + use crate::{native_endian, reader::Reader}; + static ENDIAN: Endian = Endian::new(); macro_rules! TestPrimitive { ($test_name:ident, $typ:ty, $input:expr, $expected:expr) => { #[test] fn $test_name() { - let input = $input; - let bit_slice = input.view_bits::(); - let (_rest, res_read) = <$typ>::read(bit_slice, ENDIAN).unwrap(); + let mut r = std::io::Cursor::new($input); + let mut reader = Reader::new(&mut r); + let res_read = <$typ>::from_reader_with_ctx(&mut reader, ENDIAN).unwrap(); assert_eq!($expected, res_read); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ENDIAN).unwrap(); - assert_eq!(input, res_write.into_vec()); + assert_eq!($input, res_write.into_vec()); } }; } - TestPrimitive!(test_u8, u8, vec![0xAAu8], 0xAAu8); + TestPrimitive!(test_u8, u8, vec![0xaau8], 0xaau8); TestPrimitive!( test_u16, u16, - vec![0xABu8, 0xCD], - native_endian!(0xCDAB_u16) + vec![0xabu8, 0xcd], + native_endian!(0xcdab_u16) ); TestPrimitive!( test_u32, u32, - vec![0xABu8, 0xCD, 0xEF, 0xBE], - native_endian!(0xBEEFCDAB_u32) + vec![0xabu8, 0xcd, 0xef, 0xbe], + native_endian!(0xbeefcdab_u32) ); TestPrimitive!( test_u64, u64, - vec![0xABu8, 0xCD, 0xEF, 0xBE, 0xAB, 0xCD, 0xFE, 0xC0], - native_endian!(0xC0FECDABBEEFCDAB_u64) + vec![0xabu8, 0xcd, 0xef, 0xbe, 0xab, 0xcd, 0xfe, 0xc0], + native_endian!(0xc0fecdabbeefcdab_u64) ); TestPrimitive!( test_u128, u128, vec![ - 0xABu8, 0xCD, 0xEF, 0xBE, 0xAB, 0xCD, 0xFE, 0xC0, 0xAB, 0xCD, 0xEF, 0xBE, 0xAB, 0xCD, - 0xFE, 0xC0 + 0xabu8, 0xcd, 0xef, 0xbe, 0xab, 0xcd, 0xfe, 0xc0, 0xab, 0xcd, 0xef, 0xbe, 0xab, 0xcd, + 0xfe, 0xc0 ], - native_endian!(0xC0FECDABBEEFCDABC0FECDABBEEFCDAB_u128) + 
native_endian!(0xc0fecdabbeefcdabc0fecdabbeefcdab_u128) ); TestPrimitive!( test_usize, usize, - vec![0xABu8, 0xCD, 0xEF, 0xBE, 0xAB, 0xCD, 0xFE, 0xC0], + vec![0xabu8, 0xcd, 0xef, 0xbe, 0xab, 0xcd, 0xfe, 0xc0], if core::mem::size_of::() == 8 { - native_endian!(0xC0FECDABBEEFCDAB_usize) + native_endian!(0xc0fecdabbeefcdab_usize) } else { - native_endian!(0xBEEFCDAB_usize) + native_endian!(0xbeefcdab_usize) } ); - TestPrimitive!(test_i8, i8, vec![0xFBu8], -5); - TestPrimitive!(test_i16, i16, vec![0xFDu8, 0xFE], native_endian!(-259_i16)); + TestPrimitive!(test_i8, i8, vec![0xfbu8], -5); + TestPrimitive!(test_i16, i16, vec![0xfdu8, 0xfe], native_endian!(-259_i16)); TestPrimitive!( test_i32, i32, - vec![0x02u8, 0x3F, 0x01, 0xEF], - native_endian!(-0x10FEC0FE_i32) + vec![0x02u8, 0x3f, 0x01, 0xef], + native_endian!(-0x10fec0fe_i32) ); TestPrimitive!( test_i64, i64, - vec![0x02u8, 0x3F, 0x01, 0xEF, 0x01, 0x3F, 0x01, 0xEF], - native_endian!(-0x10FEC0FE10FEC0FE_i64) + vec![0x02u8, 0x3f, 0x01, 0xef, 0x01, 0x3f, 0x01, 0xef], + native_endian!(-0x10fec0fe10fec0fe_i64) ); TestPrimitive!( test_i128, i128, vec![ - 0x02u8, 0x3F, 0x01, 0xEF, 0x01, 0x3F, 0x01, 0xEF, 0x01, 0x3F, 0x01, 0xEF, 0x01, 0x3F, - 0x01, 0xEF + 0x02u8, 0x3f, 0x01, 0xef, 0x01, 0x3f, 0x01, 0xef, 0x01, 0x3f, 0x01, 0xef, 0x01, 0x3f, + 0x01, 0xef ], - native_endian!(-0x10FEC0FE10FEC0FE10FEC0FE10FEC0FE_i128) + native_endian!(-0x10fec0fe10fec0fe10fec0fe10fec0fe_i128) ); TestPrimitive!( test_isize, isize, - vec![0x02u8, 0x3F, 0x01, 0xEF, 0x01, 0x3F, 0x01, 0xEF], + vec![0x02u8, 0x3f, 0x01, 0xef, 0x01, 0x3f, 0x01, 0xef], if core::mem::size_of::() == 8 { - native_endian!(-0x10FEC0FE10FEC0FE_isize) + native_endian!(-0x10fec0fe10fec0fe_isize) } else { - native_endian!(-0x10FEC0FE_isize) + native_endian!(-0x10fec0fe_isize) } ); TestPrimitive!( test_f32, f32, - vec![0xA6u8, 0x9B, 0xC4, 0xBB], + vec![0xa6u8, 0x9b, 0xc4, 0xbb], native_endian!(-0.006_f32) ); TestPrimitive!( test_f64, f64, - vec![0xFAu8, 0x7E, 0x6A, 0xBC, 0x74, 
0x93, 0x78, 0xBF], + vec![0xfau8, 0x7e, 0x6a, 0xbc, 0x74, 0x93, 0x78, 0xbf], native_endian!(-0.006_f64) ); - #[rstest(input, endian, bit_size, expected, expected_rest, - case::normal([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(32), 0xAABB_CCDD, bits![u8, Msb0;]), - case::normal_bits_12_le([0b1001_0110, 0b1110_0000, 0xCC, 0xDD ].as_ref(), Endian::Little, Some(12), 0b1110_1001_0110, bits![u8, Msb0; 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1]), - case::normal_bits_12_be([0b1001_0110, 0b1110_0000, 0xCC, 0xDD ].as_ref(), Endian::Big, Some(12), 0b1001_0110_1110, bits![u8, Msb0; 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1]), - case::normal_bit_6([0b1001_0110].as_ref(), Endian::Little, Some(6), 0b1001_01, bits![u8, Msb0; 1, 0,]), + #[rstest(input, endian, bit_size, expected, expected_rest_bits, expected_rest_bytes, + case::normal([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(32), 0xAABB_CCDD, bits![u8, Msb0;], &[]), + case::normal([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Big, Some(32), 0xDDCC_BBAA, bits![u8, Msb0;], &[]), + case::normal_bits_12_le([0b1001_0110, 0b1110_0000, 0xCC, 0xDD ].as_ref(), Endian::Little, Some(12), 0b1110_1001_0110, bits![u8, Msb0; 0, 0, 0, 0], &[0xcc, 0xdd]), + case::normal_bits_12_be([0b1001_0110, 0b1110_0000, 0xCC, 0xDD ].as_ref(), Endian::Big, Some(12), 0b1001_0110_1110, bits![u8, Msb0; 0, 0, 0, 0], &[0xcc, 0xdd]), + case::normal_bit_6([0b1001_0110].as_ref(), Endian::Little, Some(6), 0b1001_01, bits![u8, Msb0; 1, 0,], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 32 })")] - case::not_enough_data([].as_ref(), Endian::Little, Some(32), 0xFF, bits![u8, Msb0;]), + case::not_enough_data([].as_ref(), Endian::Little, Some(32), 0xFF, bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 32 })")] - case::not_enough_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(32), 0xFF, bits![u8, Msb0;]), - #[should_panic(expected = "Parse(\"too much data: container of 32 
bits cannot hold 64 bits\")")] - case::too_much_data([0xAA, 0xBB, 0xCC, 0xDD, 0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(64), 0xFF, bits![u8, Msb0;]), + case::not_enough_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(32), 0xFF, bits![u8, Msb0;], &[]), + #[should_panic(expected = "Parse(\"too much data: container of 32 bits cannot hold 64 bits\")")] // This will end up in ByteSize b/c 64 % 8 == 0 + case::too_much_data([0xAA, 0xBB, 0xCC, 0xDD, 0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(64), 0xFF, bits![u8, Msb0;], &[]), + #[should_panic(expected = "Parse(\"too much data: container of 32 bits cannot hold 63 bits\")")] // This will end up staying BitSize + case::too_much_data([0xAA, 0xBB, 0xCC, 0xDD, 0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(63), 0xFF, bits![u8, Msb0;], &[]), )] fn test_bit_read( - input: &[u8], + mut input: &[u8], endian: Endian, bit_size: Option, expected: u32, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], + ) { + // test both Read &[u8] and Read BitVec + let mut reader = Reader::new(&mut input); + let res_read = match bit_size { + Some(bit_size) => { + u32::from_reader_with_ctx(&mut reader, (endian, BitSize(bit_size))).unwrap() + } + None => u32::from_reader_with_ctx(&mut reader, endian).unwrap(), + }; + assert_eq!(expected, res_read); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + input.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); + } + + #[rstest(input, endian, byte_size, expected, expected_rest_bytes, + case::normal_be([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Big, Some(4), 0xDDCC_BBAA, &[]), + case::normal_le([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(4), 0xAABB_CCDD, &[]), + case::normal_be([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Big, Some(3), 0x00DDCC_BB, &[0xaa]), + case::normal_be([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(3), 
0x00BB_CCDD, &[0xaa]), + #[should_panic(expected = "Incomplete(NeedSize { bits: 32 })")] + case::not_enough_data([].as_ref(), Endian::Little, Some(4), 0xFF, &[]), + #[should_panic(expected = "Incomplete(NeedSize { bits: 32 })")] + case::not_enough_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(4), 0xFF, &[]), + #[should_panic(expected = "Parse(\"too much data: container of 4 bytes cannot hold 8 bytes\")")] + case::too_much_data([0xAA, 0xBB, 0xCC, 0xDD, 0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(8), 0xFF, &[]), + )] + fn test_byte_read( + mut input: &[u8], + endian: Endian, + byte_size: Option, + expected: u32, + expected_rest_bytes: &[u8], ) { - let bit_slice = input.view_bits::(); + let mut bit_slice = input.view_bits::(); - let (rest, res_read) = match bit_size { - Some(bit_size) => u32::read(bit_slice, (endian, BitSize(bit_size))).unwrap(), - None => u32::read(bit_slice, endian).unwrap(), + // test both Read &[u8] and Read BitVec + let mut reader = Reader::new(&mut input); + let res_read = match byte_size { + Some(byte_size) => { + u32::from_reader_with_ctx(&mut reader, (endian, ByteSize(byte_size))).unwrap() + } + None => u32::from_reader_with_ctx(&mut reader, endian).unwrap(), }; + assert_eq!(expected, res_read); + let mut reader = Reader::new(&mut bit_slice); + let res_read = match byte_size { + Some(byte_size) => { + u32::from_reader_with_ctx(&mut reader, (endian, ByteSize(byte_size))).unwrap() + } + None => u32::from_reader_with_ctx(&mut reader, endian).unwrap(), + }; assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + let mut buf = vec![]; + input.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); } #[rstest(input, endian, bit_size, expected, @@ -673,25 +810,26 @@ mod tests { assert_eq!(expected, res_write.into_vec()); } - #[rstest(input, endian, bit_size, expected, expected_rest, expected_write, - case::normal([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(32), 0xAABB_CCDD, bits![u8, Msb0;], 
vec![0xDD, 0xCC, 0xBB, 0xAA]), + #[rstest(input, endian, bit_size, expected, expected_write, + case::normal([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, Some(32), 0xAABB_CCDD, vec![0xDD, 0xCC, 0xBB, 0xAA]), )] fn test_bit_read_write( input: &[u8], endian: Endian, bit_size: Option, expected: u32, - expected_rest: &BitSlice, expected_write: Vec, ) { - let bit_slice = input.view_bits::(); + let mut bit_slice = input.view_bits::(); - let (rest, res_read) = match bit_size { - Some(bit_size) => u32::read(bit_slice, (endian, BitSize(bit_size))).unwrap(), - None => u32::read(bit_slice, endian).unwrap(), + let mut reader = Reader::new(&mut bit_slice); + let res_read = match bit_size { + Some(bit_size) => { + u32::from_reader_with_ctx(&mut reader, (endian, BitSize(bit_size))).unwrap() + } + None => u32::from_reader_with_ctx(&mut reader, endian).unwrap(), }; assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); let mut res_write = bitvec![u8, Msb0;]; match bit_size { @@ -708,12 +846,12 @@ mod tests { ($test_name:ident, $typ:ty) => { #[test] fn $test_name() { - let bit_slice = [0b10101_000].view_bits::(); - - let (rest, res_read) = <$typ>::read(bit_slice, (Endian::Little, BitSize(5))).unwrap(); - + let mut slice = [0b10101_000].as_slice(); + let mut reader = Reader::new(&mut slice); + let res_read = + <$typ>::from_reader_with_ctx(&mut reader, (Endian::Little, BitSize(5))) + .unwrap(); assert_eq!(-11, res_read); - assert_eq!(bits![u8, Msb0; 0, 0, 0], rest); } }; } @@ -724,4 +862,30 @@ mod tests { TestSignExtending!(test_sign_extend_i64, i64); TestSignExtending!(test_sign_extend_i128, i128); TestSignExtending!(test_sign_extend_isize, isize); + + macro_rules! 
TestSignExtendingPanic { + ($test_name:ident, $typ:ty, $size:expr) => { + #[test] + fn $test_name() { + let mut slice = [0b10101_000].as_slice(); + let mut reader = Reader::new(&mut slice); + let res_read = + <$typ>::from_reader_with_ctx(&mut reader, (Endian::Little, BitSize($size + 1))); + assert_eq!( + DekuError::Parse(format!( + "too much data: container of {} bits cannot hold {} bits", + $size, + $size + 1 + )), + res_read.err().unwrap() + ); + } + }; + } + + TestSignExtendingPanic!(test_sign_extend_i8_panic, i8, 8); + TestSignExtendingPanic!(test_sign_extend_i16_panic, i16, 16); + TestSignExtendingPanic!(test_sign_extend_i32_panic, i32, 32); + TestSignExtendingPanic!(test_sign_extend_i64_panic, i64, 64); + TestSignExtendingPanic!(test_sign_extend_i128_panic, i128, 128); } diff --git a/src/impls/slice.rs b/src/impls/slice.rs index 6f85c21e..a6b60408 100644 --- a/src/impls/slice.rs +++ b/src/impls/slice.rs @@ -1,281 +1,93 @@ //! Implementations of DekuRead and DekuWrite for [T; N] where 0 < N <= 32 -use crate::{ctx::Limit, DekuError, DekuRead, DekuWrite}; +use crate::{DekuError, DekuWrite}; use bitvec::prelude::*; -pub use deku_derive::*; +use core::mem::MaybeUninit; +use no_std_io::io::Read; -/// Read `u8`s and returns a byte slice up until a given predicate returns true -/// * `ctx` - The context required by `u8`. It will be passed to every `u8` when constructing. -/// * `predicate` - the predicate that decides when to stop reading `u8`s -/// The predicate takes two parameters: the number of bits that have been read so far, -/// and a borrow of the latest value to have been read. 
It should return `true` if reading -/// should now stop, and `false` otherwise -fn read_slice_with_predicate<'a, Ctx: Copy, Predicate: FnMut(usize, &u8) -> bool>( - input: &'a BitSlice, - ctx: Ctx, - mut predicate: Predicate, -) -> Result<(&'a BitSlice, &[u8]), DekuError> -where - u8: DekuRead<'a, Ctx>, -{ - let mut rest = input; - let mut value; - - loop { - let (new_rest, val) = u8::read(rest, ctx)?; - rest = new_rest; - - let read_idx = unsafe { rest.as_bitptr().offset_from(input.as_bitptr()) } as usize; - value = input[..read_idx].domain().region().unwrap().1; - - if predicate(read_idx, &val) { - break; - } - } - - Ok((rest, value)) -} +use crate::DekuReader; -impl<'a, Ctx: Copy, Predicate: FnMut(&u8) -> bool> DekuRead<'a, (Limit, Ctx)> - for &'a [u8] +impl<'a, Ctx: Copy, T, const N: usize> DekuReader<'a, Ctx> for [T; N] where - u8: DekuRead<'a, Ctx>, + T: DekuReader<'a, Ctx>, { - /// Read `u8`s until the given limit - /// * `limit` - the limiting factor on the amount of `u8`s to read - /// * `inner_ctx` - The context required by `u8`. It will be passed to every `u8`s when constructing. 
- /// # Examples - /// ```rust - /// # use deku::ctx::*; - /// # use deku::DekuRead; - /// # use bitvec::view::BitView; - /// let input = vec![1u8, 2, 3, 4]; - /// let (rest, v) = <&[u8]>::read(input.view_bits(), (4.into(), Endian::Little)).unwrap(); - /// assert!(rest.is_empty()); - /// assert_eq!(&[1u8, 2, 3, 4], v) - /// ``` - fn read( - input: &'a BitSlice, - (limit, inner_ctx): (Limit, Ctx), - ) -> Result<(&'a BitSlice, Self), DekuError> { - match limit { - // Read a given count of elements - Limit::Count(mut count) => { - // Handle the trivial case of reading an empty slice - if count == 0 { - return Ok((input, &input.domain().region().unwrap().1[..0])); - } - - // Otherwise, read until we have read `count` elements - read_slice_with_predicate(input, inner_ctx, move |_, _| { - count -= 1; - count == 0 - }) - } - - // Read until a given predicate returns true - Limit::Until(mut predicate, _) => { - read_slice_with_predicate(input, inner_ctx, move |_, value| predicate(value)) - } - - // Read until a given quantity of bits have been read - Limit::BitSize(size) => { - let bit_size = size.0; - - // Handle the trivial case of reading an empty vector - if bit_size == 0 { - return Ok((input, &input.domain().region().unwrap().1[..0])); - } - - read_slice_with_predicate(input, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) - } - - // Read until a given quantity of bytes have been read - Limit::ByteSize(size) => { - let bit_size = size.0 * 8; - - // Handle the trivial case of reading an empty vector - if bit_size == 0 { - return Ok((input, &input.domain().region().unwrap().1[..0])); - } - - read_slice_with_predicate(input, inner_ctx, move |read_bits, _| { - read_bits == bit_size - }) - } - } - } -} - -#[cfg(not(feature = "const_generics"))] -mod pre_const_generics_impl { - use super::*; - - macro_rules! ImplDekuSliceTraits { - ($typ:ty; $($count:expr),+ $(,)?) 
=> { - - impl DekuWrite for &[$typ] - where - $typ: DekuWrite, - { - fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { - for v in *self { - v.write(output, ctx)?; - } - Ok(()) - } - } - - $( - impl<'a, Ctx: Copy> DekuRead<'a, Ctx> for [$typ; $count] - where - $typ: DekuRead<'a, Ctx>, - { - fn read( - input: &'a BitSlice, - ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - let mut slice: [$typ; $count] = Default::default(); - let mut rest = input; - for i in 0..$count { - let (new_rest, value) = <$typ>::read(rest, ctx)?; - slice[i] = value; - rest = new_rest; + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, + ctx: Ctx, + ) -> Result + where + Self: Sized, + { + #[allow(clippy::uninit_assumed_init)] + // This is safe because we initialize the array immediately after, + // and never return it in case of error + let mut slice: [MaybeUninit; N] = unsafe { MaybeUninit::uninit().assume_init() }; + for (n, item) in slice.iter_mut().enumerate() { + let value = match T::from_reader_with_ctx(reader, ctx) { + Ok(it) => it, + Err(err) => { + // For each item in the array, drop if we allocated it. 
+ for item in &mut slice[0..n] { + unsafe { + item.assume_init_drop(); } - - Ok((rest, slice)) } + return Err(err); } + }; + item.write(value); + } - impl DekuWrite for [$typ; $count] - where - $typ: DekuWrite, - { - fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { - for v in self { - v.write(output, ctx)?; - } - Ok(()) - } - } - )+ + let val = unsafe { + // TODO: array_assume_init: https://github.com/rust-lang/rust/issues/80908 + (core::ptr::addr_of!(slice) as *const [T; N]).read() }; + Ok(val) } - - ImplDekuSliceTraits!(i8; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(i16; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(i32; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(i64; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(i128; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(isize; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(u8; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(u16; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(u32; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(u64; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 
30, 31, 32); - ImplDekuSliceTraits!(u128; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(usize; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(f32; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); - ImplDekuSliceTraits!(f64; 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32); } -#[cfg(feature = "const_generics")] -mod const_generics_impl { - use super::*; - - use core::mem::MaybeUninit; - - impl<'a, Ctx: Copy, T, const N: usize> DekuRead<'a, Ctx> for [T; N] - where - T: DekuRead<'a, Ctx>, - { - fn read( - input: &'a BitSlice, - ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> - where - Self: Sized, - { - #[allow(clippy::uninit_assumed_init)] - // This is safe because we initialize the array immediately after, - // and never return it in case of error - let mut slice: [MaybeUninit; N] = unsafe { MaybeUninit::uninit().assume_init() }; - let mut rest = input; - for (n, item) in slice.iter_mut().enumerate() { - let (new_rest, value) = match T::read(rest, ctx) { - Ok(it) => it, - Err(err) => { - // For each item in the array, drop if we allocated it. 
- for item in &mut slice[0..n] { - unsafe { - item.assume_init_drop(); - } - } - return Err(err); - } - }; - item.write(value); - rest = new_rest; - } - - Ok((rest, unsafe { - // TODO: array_assume_init: https://github.com/rust-lang/rust/issues/80908 - (&slice as *const _ as *const [T; N]).read() - })) - } - } - - impl DekuWrite for [T; N] - where - T: DekuWrite, - { - fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { - for v in self { - v.write(output, ctx)?; - } - Ok(()) +impl DekuWrite for [T; N] +where + T: DekuWrite, +{ + fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { + for v in self { + v.write(output, ctx)?; } + Ok(()) } +} - impl DekuWrite for &[T] - where - T: DekuWrite, - { - fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { - for v in *self { - v.write(output, ctx)?; - } - Ok(()) +impl DekuWrite for &[T] +where + T: DekuWrite, +{ + fn write(&self, output: &mut BitVec, ctx: Ctx) -> Result<(), DekuError> { + for v in *self { + v.write(output, ctx)?; } + Ok(()) } } #[cfg(test)] mod tests { - use super::*; - - use crate::ctx::Endian; + use crate::DekuWrite; + use bitvec::prelude::*; use rstest::rstest; - #[rstest(input,endian,expected,expected_rest, - case::normal_le([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, [0xCCDD, 0xAABB], bits![u8, Msb0;]), - case::normal_be([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Big, [0xDDCC, 0xBBAA], bits![u8, Msb0;]), + use crate::{ctx::Endian, reader::Reader, DekuReader}; + + #[rstest(input,endian,expected, + case::normal_le([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Little, [0xCCDD, 0xAABB]), + case::normal_be([0xDD, 0xCC, 0xBB, 0xAA].as_ref(), Endian::Big, [0xDDCC, 0xBBAA]), )] - fn test_bit_read( - input: &[u8], - endian: Endian, - expected: [u16; 2], - expected_rest: &BitSlice, - ) { - let bit_slice = input.view_bits::(); + fn test_bit_read(input: &[u8], endian: Endian, expected: [u16; 2]) { + let mut bit_slice = input.view_bits::(); - let 
(rest, res_read) = <[u16; 2]>::read(bit_slice, endian).unwrap(); + let mut reader = Reader::new(&mut bit_slice); + let res_read = <[u16; 2]>::from_reader_with_ctx(&mut reader, endian).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); } #[rstest(input,endian,expected, @@ -315,11 +127,16 @@ mod tests { expected: [[u16; 2]; 2], expected_rest: &BitSlice, ) { + use no_std_io::io::Cursor; + + use crate::reader::Reader; + let bit_slice = input.view_bits::(); - let (rest, res_read) = <[[u16; 2]; 2]>::read(bit_slice, endian).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = <[[u16; 2]; 2]>::from_reader_with_ctx(&mut reader, endian).unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); } #[cfg(feature = "const_generics")] diff --git a/src/impls/tuple.rs b/src/impls/tuple.rs index 9872c77d..d527d39f 100644 --- a/src/impls/tuple.rs +++ b/src/impls/tuple.rs @@ -1,7 +1,9 @@ //! Implementations of DekuRead and DekuWrite for tuples of length 1 to 11 -use crate::{DekuError, DekuRead, DekuWrite}; use bitvec::prelude::*; +use no_std_io::io::Read; + +use crate::{DekuError, DekuReader, DekuWrite}; // Trait to help us build intermediate tuples while DekuRead'ing each element // from the tuple @@ -34,23 +36,21 @@ macro_rules! 
ImplDekuTupleTraits { } } - impl<'a, Ctx: Copy, $($T:DekuRead<'a, Ctx>+Sized),+> DekuRead<'a, Ctx> for ($($T,)+) + impl<'a, Ctx: Copy, $($T:DekuReader<'a, Ctx>+Sized),+> DekuReader<'a, Ctx> for ($($T,)+) { - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, ctx: Ctx, - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { let tuple = (); - let mut rest = input; $( - let read = <$T>::read(rest, ctx)?; - rest = read.0; - let tuple = tuple.append(read.1); + let val = <$T>::from_reader_with_ctx(reader, ctx)?; + let tuple = tuple.append(val); )+ - Ok((rest, tuple)) + Ok(tuple) } } @@ -82,27 +82,10 @@ ImplDekuTupleTraits! { A, B, C, D, E, F, G, H, I, J, K, } #[cfg(test)] mod tests { - use super::*; - use crate::native_endian; - use core::fmt::Debug; - use rstest::rstest; - #[rstest(input, expected, expected_rest, - case::length_1([0xef, 0xbe, 0xad, 0xde].as_ref(), (native_endian!(0xdeadbeef_u32),), bits![u8, Msb0;]), - case::length_2([1, 0x24, 0x98, 0x82, 0].as_ref(), (true, native_endian!(0x829824_u32)), bits![u8, Msb0;]), - case::length_11([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10].as_ref(), (0u8, 1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 9u8, 10u8), bits![u8, Msb0;]), - case::extra_rest([1, 0x24, 0x98, 0x82, 0, 0].as_ref(), (true, native_endian!(0x829824_u32)), bits![u8, Msb0; 0, 0, 0, 0, 0, 0, 0, 0]), - )] - fn test_tuple_read<'a, T>(input: &'a [u8], expected: T, expected_rest: &BitSlice) - where - T: DekuRead<'a> + Sized + PartialEq + Debug, - { - let bit_slice = input.view_bits::(); - let (rest, res_read) = ::read(bit_slice, ()).unwrap(); - assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); - } + use super::*; + use crate::native_endian; #[rstest(input, expected, case::length_1((native_endian!(0xdeadbeef_u32),), vec![0xef, 0xbe, 0xad, 0xde]), diff --git a/src/impls/unit.rs b/src/impls/unit.rs index 710f3d72..d5e3d895 100644 --- a/src/impls/unit.rs +++ b/src/impls/unit.rs @@ -1,16 +1,14 
@@ -use crate::{DekuError, DekuRead, DekuWrite}; use bitvec::prelude::*; +use no_std_io::io::Read; -impl DekuRead<'_, Ctx> for () { - /// NOP on read - fn read( - input: &BitSlice, +use crate::{DekuError, DekuReader, DekuWrite}; + +impl DekuReader<'_, Ctx> for () { + fn from_reader_with_ctx( + _reader: &mut crate::reader::Reader, _inner_ctx: Ctx, - ) -> Result<(&BitSlice, Self), DekuError> - where - Self: Sized, - { - Ok((input, ())) + ) -> Result { + Ok(()) } } @@ -23,19 +21,21 @@ impl DekuWrite for () { #[cfg(test)] mod tests { + use crate::reader::Reader; + use std::io::Cursor; + use super::*; - use hexlit::hex; #[test] #[allow(clippy::unit_arg)] #[allow(clippy::unit_cmp)] fn test_unit() { - let input = &hex!("FF"); + let input = &[0xff]; - let bit_slice = input.view_bits::(); - let (rest, res_read) = <()>::read(bit_slice, ()).unwrap(); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let res_read = <()>::from_reader_with_ctx(&mut reader, ()).unwrap(); assert_eq!((), res_read); - assert_eq!(bit_slice, rest); let mut res_write = bitvec![u8, Msb0;]; res_read.write(&mut res_write, ()).unwrap(); diff --git a/src/impls/vec.rs b/src/impls/vec.rs index d936819b..a3c66aa5 100644 --- a/src/impls/vec.rs +++ b/src/impls/vec.rs @@ -1,9 +1,13 @@ -use crate::{ctx::*, DekuError, DekuRead, DekuWrite}; -use bitvec::prelude::*; +use no_std_io::io::Read; #[cfg(feature = "alloc")] use alloc::vec::Vec; +use bitvec::prelude::*; + +use crate::{ctx::*, DekuReader}; +use crate::{DekuError, DekuWrite}; + /// Read `T`s into a vec until a given predicate returns true /// * `capacity` - an optional capacity to pre-allocate the vector with /// * `ctx` - The context required by `T`. It will be passed to every `T` when constructing. @@ -11,59 +15,45 @@ use alloc::vec::Vec; /// The predicate takes two parameters: the number of bits that have been read so far, /// and a borrow of the latest value to have been read. 
It should return `true` if reading /// should now stop, and `false` otherwise -fn read_vec_with_predicate< - 'a, - T: DekuRead<'a, Ctx>, - Ctx: Copy, - Predicate: FnMut(usize, &T) -> bool, ->( - input: &'a BitSlice, +fn reader_vec_with_predicate<'a, T, Ctx, Predicate, R: Read>( + reader: &mut crate::reader::Reader, capacity: Option, ctx: Ctx, mut predicate: Predicate, -) -> Result<(&'a BitSlice, Vec), DekuError> { +) -> Result, DekuError> +where + T: DekuReader<'a, Ctx>, + Ctx: Copy, + Predicate: FnMut(usize, &T) -> bool, +{ let mut res = capacity.map_or_else(Vec::new, Vec::with_capacity); - let mut rest = input; + let start_read = reader.bits_read; loop { - let (new_rest, val) = ::read(rest, ctx)?; + let val = ::from_reader_with_ctx(reader, ctx)?; res.push(val); - rest = new_rest; // This unwrap is safe as we are pushing to the vec immediately before it, // so there will always be a last element - if predicate( - unsafe { rest.as_bitptr().offset_from(input.as_bitptr()) } as usize, - res.last().unwrap(), - ) { + if predicate(reader.bits_read - start_read, res.last().unwrap()) { break; } } - Ok((rest, res)) + Ok(res) } -impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> - DekuRead<'a, (Limit, Ctx)> for Vec +impl<'a, T, Ctx, Predicate> DekuReader<'a, (Limit, Ctx)> for Vec +where + T: DekuReader<'a, Ctx>, + Ctx: Copy, + Predicate: FnMut(&T) -> bool, { - /// Read `T`s until the given limit - /// * `limit` - the limiting factor on the amount of `T`s to read - /// * `inner_ctx` - The context required by `T`. It will be passed to every `T`s when constructing. 
- /// # Examples - /// ```rust - /// # use deku::ctx::*; - /// # use deku::DekuRead; - /// # use deku::bitvec::BitView; - /// let input = vec![1u8, 2, 3, 4]; - /// let (rest, v) = Vec::::read(input.view_bits(), (1.into(), Endian::Little)).unwrap(); - /// assert!(rest.is_empty()); - /// assert_eq!(vec![0x04030201], v) - /// ``` - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, (limit, inner_ctx): (Limit, Ctx), - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { @@ -72,11 +62,11 @@ impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> Limit::Count(mut count) => { // Handle the trivial case of reading an empty vector if count == 0 { - return Ok((input, Vec::new())); + return Ok(Vec::new()); } // Otherwise, read until we have read `count` elements - read_vec_with_predicate(input, Some(count), inner_ctx, move |_, _| { + reader_vec_with_predicate(reader, Some(count), inner_ctx, move |_, _| { count -= 1; count == 0 }) @@ -84,7 +74,7 @@ impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> // Read until a given predicate returns true Limit::Until(mut predicate, _) => { - read_vec_with_predicate(input, None, inner_ctx, move |_, value| predicate(value)) + reader_vec_with_predicate(reader, None, inner_ctx, move |_, value| predicate(value)) } // Read until a given quantity of bits have been read @@ -93,10 +83,10 @@ impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> // Handle the trivial case of reading an empty vector if bit_size == 0 { - return Ok((input, Vec::new())); + return Ok(Vec::new()); } - read_vec_with_predicate(input, None, inner_ctx, move |read_bits, _| { + reader_vec_with_predicate(reader, None, inner_ctx, move |read_bits, _| { read_bits == bit_size }) } @@ -107,10 +97,10 @@ impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> // Handle the trivial case of reading an empty vector if bit_size == 0 { - return 
Ok((input, Vec::new())); + return Ok(Vec::new()); } - read_vec_with_predicate(input, None, inner_ctx, move |read_bits, _| { + reader_vec_with_predicate(reader, None, inner_ctx, move |read_bits, _| { read_bits == bit_size }) } @@ -118,18 +108,18 @@ impl<'a, T: DekuRead<'a, Ctx>, Ctx: Copy, Predicate: FnMut(&T) -> bool> } } -impl<'a, T: DekuRead<'a>, Predicate: FnMut(&T) -> bool> DekuRead<'a, Limit> +impl<'a, T: DekuReader<'a>, Predicate: FnMut(&T) -> bool> DekuReader<'a, Limit> for Vec { /// Read `T`s until the given limit from input for types which don't require context. - fn read( - input: &'a BitSlice, + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, limit: Limit, - ) -> Result<(&'a BitSlice, Self), DekuError> + ) -> Result where Self: Sized, { - Vec::read(input, (limit, ())) + Vec::from_reader_with_ctx(reader, (limit, ())) } } @@ -155,48 +145,57 @@ impl, Ctx: Copy> DekuWrite for Vec { #[cfg(test)] mod tests { - use super::*; use rstest::rstest; - #[rstest(input,endian,bit_size,limit,expected,expected_rest, - case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 0.into(), vec![], bits![u8, Msb0; 1, 0, 1, 0, 1, 0, 1, 0]), - case::count_1([0xAA, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), vec![0xAA], bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB], bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0]), - case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0], bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA], bits![u8, Msb0; 1, 0, 1, 1, 1, 0, 1, 1]), - case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110], bits![u8, Msb0; 1, 0, 0, 1]), + use crate::reader::Reader; + + use super::*; + + #[rstest(input,endian, bit_size, limit, expected, expected_rest_bits, 
expected_rest_bytes, + case::count_0([0xAA].as_ref(), Endian::Little, Some(8), 0.into(), vec![], bits![u8, Msb0;], &[0xaa]), + case::count_1([0xAA, 0xBB].as_ref(), Endian::Little, Some(8), 1.into(), vec![0xAA], bits![u8, Msb0;], &[0xbb]), + case::count_2([0xAA, 0xBB, 0xCC].as_ref(), Endian::Little, Some(8), 2.into(), vec![0xAA, 0xBB], bits![u8, Msb0;], &[0xcc]), + case::until_null([0xAA, 0, 0xBB].as_ref(), Endian::Little, None, (|v: &u8| *v == 0u8).into(), vec![0xAA, 0], bits![u8, Msb0;], &[0xbb]), + case::until_bits([0xAA, 0xBB].as_ref(), Endian::Little, None, BitSize(8).into(), vec![0xAA], bits![u8, Msb0;], &[0xbb]), + case::bits_6([0b0110_1001, 0b1110_1001].as_ref(), Endian::Little, Some(6), 2.into(), vec![0b00_011010, 0b00_011110], bits![u8, Msb0; 1, 0, 0, 1], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;]), + case::not_enough_data([].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), vec![], bits![u8, Msb0;]), + case::not_enough_data([0xAA].as_ref(), Endian::Little, Some(8), 2.into(), vec![], bits![u8, Msb0;], &[]), #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &u8| false).into(), vec![], bits![u8, Msb0;]), + case::not_enough_data_until([0xAA].as_ref(), Endian::Little, Some(8), (|_: &u8| false).into(), vec![], bits![u8, Msb0;], &[]), #[should_panic(expected 
= "Incomplete(NeedSize { bits: 8 })")] - case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), vec![], bits![u8, Msb0;]), + case::not_enough_data_bits([0xAA].as_ref(), Endian::Little, Some(8), (BitSize(16)).into(), vec![], bits![u8, Msb0;], &[]), #[should_panic(expected = "Parse(\"too much data: container of 8 bits cannot hold 9 bits\")")] - case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;]), + case::too_much_data([0xAA, 0xBB].as_ref(), Endian::Little, Some(9), 1.into(), vec![], bits![u8, Msb0;], &[]), )] - fn test_vec_read bool>( - input: &[u8], + fn test_vec_reader bool>( + mut input: &[u8], endian: Endian, bit_size: Option, limit: Limit, expected: Vec, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], ) { - let bit_slice = input.view_bits::(); - - let (rest, res_read) = match bit_size { + let mut reader = Reader::new(&mut input); + let res_read = match bit_size { Some(bit_size) => { - Vec::::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap() + Vec::::from_reader_with_ctx(&mut reader, (limit, (endian, BitSize(bit_size)))) + .unwrap() } - None => Vec::::read(bit_slice, (limit, (endian))).unwrap(), + None => Vec::::from_reader_with_ctx(&mut reader, (limit, (endian))).unwrap(), }; - assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + input.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); } #[rstest(input, endian, expected, @@ -209,32 +208,40 @@ mod tests { } // Note: These tests also exist in boxed.rs - #[rstest(input, endian, bit_size, limit, expected, expected_rest, expected_write, - case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC], bits![u8, Msb0;], vec![0xAA, 0xBB, 0xCC, 0xDD]), - case::normal_be([0xAA, 0xBB, 0xCC, 
0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD], bits![u8, Msb0;], vec![0xAA, 0xBB, 0xCC, 0xDD]), - case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA], bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB], bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA], bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), - case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), vec![0xAABB], bits![u8, Msb0; 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1], vec![0xAA, 0xBB]), + #[rstest(input, endian, bit_size, limit, expected, expected_rest_bits, expected_rest_bytes, expected_write, + case::normal_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), 2.into(), vec![0xBBAA, 0xDDCC], bits![u8, Msb0;], &[], vec![0xAA, 0xBB, 0xCC, 0xDD]), + case::normal_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), 2.into(), vec![0xAABB, 0xCCDD], bits![u8, Msb0;], &[], vec![0xAA, 0xBB, 0xCC, 0xDD]), + case::predicate_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), (|v: &u16| *v == 0xBBAA).into(), vec![0xBBAA], bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::predicate_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), (|v: &u16| *v == 0xAABB).into(), vec![0xAABB], bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_le([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Little, Some(16), BitSize(16).into(), vec![0xBBAA], bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), + case::bytes_be([0xAA, 0xBB, 0xCC, 0xDD].as_ref(), Endian::Big, Some(16), BitSize(16).into(), 
vec![0xAABB], bits![u8, Msb0;], &[0xcc, 0xdd], vec![0xAA, 0xBB]), )] - fn test_vec_read_write bool>( - input: &[u8], + fn test_vec_reader_write bool>( + mut input: &[u8], endian: Endian, bit_size: Option, limit: Limit, expected: Vec, - expected_rest: &BitSlice, + expected_rest_bits: &BitSlice, + expected_rest_bytes: &[u8], expected_write: Vec, ) { - let bit_slice = input.view_bits::(); - + let input_clone = input; // Unwrap here because all test cases are `Some`. let bit_size = bit_size.unwrap(); - let (rest, res_read) = - Vec::::read(bit_slice, (limit, (endian, BitSize(bit_size)))).unwrap(); + let mut reader = Reader::new(&mut input); + let res_read = + Vec::::from_reader_with_ctx(&mut reader, (limit, (endian, BitSize(bit_size)))) + .unwrap(); assert_eq!(expected, res_read); - assert_eq!(expected_rest, rest); + assert_eq!( + reader.rest(), + expected_rest_bits.iter().by_vals().collect::>() + ); + let mut buf = vec![]; + input.read_to_end(&mut buf).unwrap(); + assert_eq!(expected_rest_bytes, buf); let mut res_write = bitvec![u8, Msb0;]; res_read @@ -242,6 +249,6 @@ mod tests { .unwrap(); assert_eq!(expected_write, res_write.into_vec()); - assert_eq!(input[..expected_write.len()].to_vec(), expected_write); + assert_eq!(input_clone[..expected_write.len()].to_vec(), expected_write); } } diff --git a/src/lib.rs b/src/lib.rs index 2e015d84..c3d8c8d3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,5 @@ /*! + # Deku: Declarative binary reading and writing Deriving a struct or enum with `DekuRead` and `DekuWrite` provides bit-level, @@ -6,12 +7,15 @@ symmetric, serialization/deserialization implementations. This allows the developer to focus on building and maintaining how the data is represented and manipulated and not on redundant, error-prone, parsing/writing code. - This approach is especially useful when dealing with binary structures such as -TLVs or network protocols. +TLVs or network protocols. 
This allows the internal rustc compiler to choose +the in-memory representation of the struct, while reading and writing can +understand the struct in a "packed" C way. -Under the hood, it makes use of the [bitvec](https://crates.io/crates/bitvec) -crate as the "Reader" and “Writer” +Under the hood, many specializations are done in order to achieve performant code. +For reading and writing bytes, the std library is used. +When bit-level control is required, it makes use of the [bitvec](https://crates.io/crates/bitvec) +crate as the "Reader" and “Writer”. For documentation and examples on available `#[deku]` attributes and features, see [attributes list](attributes) @@ -26,8 +30,8 @@ For use in `no_std` environments, `alloc` is the single feature which is require # Example Let's read big-endian data into a struct, with fields containing different sizes, -modify a value, and write it back - +modify a value, and write it back. In this example we use [from_bytes](DekuContainerRead::from_bytes), +but we could also use [from_reader](DekuContainerRead::from_reader). ```rust use deku::prelude::*; @@ -57,9 +61,11 @@ assert_eq!(vec![0b0110_1001, 0xC0, 0xFE], data_out); # Composing -Deku structs/enums can be composed as long as they implement DekuRead / DekuWrite traits +Deku structs/enums can be composed as long as they implement [DekuReader] / [DekuWrite] traits which +can be derived by using the `DekuRead` and `DekuWrite` Derive macros. ```rust +# use std::io::Cursor; use deku::prelude::*; #[derive(Debug, PartialEq, DekuRead, DekuWrite)] @@ -98,6 +104,7 @@ If the length of Vec changes, the original field specified in `count` will not g Calling `.update()` can be used to "update" the field! ```rust +# use std::io::Cursor; use deku::prelude::*; #[derive(Debug, PartialEq, DekuRead, DekuWrite)] @@ -163,6 +170,7 @@ based on the field marked with `default`. 
Example: ```rust +# use std::io::Cursor; use deku::prelude::*; #[derive(Debug, PartialEq, DekuRead, DekuWrite)] @@ -174,12 +182,14 @@ enum DekuTest { VariantB(u16), } -let data: Vec = vec![0x01, 0x02, 0xEF, 0xBE]; +let data: &[u8] = &[0x01, 0x02, 0xEF, 0xBE]; +let mut cursor = Cursor::new(data); -let (rest, val) = DekuTest::from_bytes((data.as_ref(), 0)).unwrap(); +let (_, val) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(DekuTest::VariantA , val); -let (rest, val) = DekuTest::from_bytes(rest).unwrap(); +// cursor now points at 0x02 +let (_, val) = DekuTest::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(DekuTest::VariantB(0xBEEF) , val); ``` @@ -192,6 +202,7 @@ For more information see [ctx attribute](attributes#ctx) Example: ```rust +# use std::io::Cursor; use deku::prelude::*; #[derive(DekuRead, DekuWrite)] @@ -208,13 +219,37 @@ struct Root { sub: Subtype } -let data: Vec = vec![0x01, 0x02]; +let data: &[u8] = &[0x01, 0x02]; +let mut cursor = Cursor::new(data); -let (rest, value) = Root::from_bytes((&data[..], 0)).unwrap(); +let (amt_read, value) = Root::from_reader((&mut cursor, 0)).unwrap(); assert_eq!(value.a, 0x01); assert_eq!(value.sub.b, 0x01 + 0x02) ``` +# `Read` enabled +Parsers can be created that directly read from a source implementing [Read](crate::no_std_io::Read). + +The crate [no_std_io] is re-exported for use in `no_std` environments. +This functions as an alias for [std::io](https://doc.rust-lang.org/stable/std/io/) when not +using `no_std`. 
+ +```rust, no_run +# use std::io::{Seek, SeekFrom, Read}; +# use std::fs::File; +# use deku::prelude::*; +#[derive(Debug, DekuRead, DekuWrite, PartialEq, Eq, Clone, Hash)] +#[deku(endian = "big")] +struct EcHdr { + magic: [u8; 4], + version: u8, + padding1: [u8; 3], +} + +let mut file = File::options().read(true).open("file").unwrap(); +let ec = EcHdr::from_reader((&mut file, 0)).unwrap(); +``` + # Internal variables and previously read fields Along similar lines to [Context](#context) variables, previously read variables @@ -238,10 +273,7 @@ tokens such as `reader`, `writer`, `map`, `count`, etc. These are provided as a convenience to the user. Always included: -- `deku::input: (&[u8], usize)` - The initial input byte slice and bit offset -(available when using [from_bytes](crate::DekuContainerRead::from_bytes)) -- `deku::input_bits: &BitSlice` - The initial input in bits -- `deku::rest: &BitSlice` - Remaining bits to read +- `deku::reader: &mut Reader` - Current [Reader](crate::reader::Reader) - `deku::output: &mut BitSlice` - The output bit stream Conditionally included if referenced: @@ -294,6 +326,13 @@ extern crate alloc; #[cfg(feature = "alloc")] use alloc::vec::Vec; +/// re-export of no_std_io +pub mod no_std_io { + pub use no_std_io::io::Cursor; + pub use no_std_io::io::Read; + pub use no_std_io::io::Result; +} + /// re-export of bitvec pub mod bitvec { pub use bitvec::prelude::*; @@ -307,28 +346,73 @@ pub mod ctx; pub mod error; mod impls; pub mod prelude; +pub mod reader; pub use crate::error::DekuError; -/// "Reader" trait: read bits and construct type -pub trait DekuRead<'a, Ctx = ()> { - /// Read bits and construct type - /// * **input** - Input as bits - /// * **ctx** - A context required by context-sensitive reading. A unit type `()` means no context - /// needed. +/// "Reader" trait: read bytes and bits from [`no_std_io::Read`]er +pub trait DekuReader<'a, Ctx = ()> { + /// Construct type from `reader` implementing [`no_std_io::Read`], with ctx. 
/// - /// Returns the remaining bits after parsing in addition to Self. - fn read( - input: &'a bitvec::BitSlice, + /// # Example + /// ```rust, no_run + /// # use std::io::{Seek, SeekFrom, Read}; + /// # use std::fs::File; + /// # use deku::prelude::*; + /// # use deku::ctx::Endian; + /// #[derive(Debug, DekuRead, DekuWrite, PartialEq, Eq, Clone, Hash)] + /// #[deku(endian = "ctx_endian", ctx = "ctx_endian: Endian")] + /// struct EcHdr { + /// magic: [u8; 4], + /// version: u8, + /// } + /// + /// let mut file = File::options().read(true).open("file").unwrap(); + /// file.seek(SeekFrom::Start(0)).unwrap(); + /// let mut reader = Reader::new(&mut file); + /// let ec = EcHdr::from_reader_with_ctx(&mut reader, Endian::Big).unwrap(); + /// ``` + fn from_reader_with_ctx( + reader: &mut crate::reader::Reader, ctx: Ctx, - ) -> Result<(&'a bitvec::BitSlice, Self), DekuError> + ) -> Result where Self: Sized; } /// "Reader" trait: implemented on DekuRead struct and enum containers. A `container` is a type which /// doesn't need any context information. -pub trait DekuContainerRead<'a>: DekuRead<'a, ()> { +pub trait DekuContainerRead<'a>: DekuReader<'a, ()> { + /// Construct type from Reader implementing [`no_std_io::Read`]. 
+ /// * **input** - Input given as "Reader" and bit offset + /// + /// # Returns + /// (amount of total bits read, Self) + /// + /// [BufRead]: std::io::BufRead + /// + /// # Example + /// ```rust, no_run + /// # use std::io::{Seek, SeekFrom, Read}; + /// # use std::fs::File; + /// # use deku::prelude::*; + /// #[derive(Debug, DekuRead, DekuWrite, PartialEq, Eq, Clone, Hash)] + /// #[deku(endian = "big")] + /// struct EcHdr { + /// magic: [u8; 4], + /// version: u8, + /// } + /// + /// let mut file = File::options().read(true).open("file").unwrap(); + /// file.seek(SeekFrom::Start(0)).unwrap(); + /// let ec = EcHdr::from_reader((&mut file, 0)).unwrap(); + /// ``` + fn from_reader( + input: (&'a mut R, usize), + ) -> Result<(usize, Self), DekuError> + where + Self: Sized; + /// Read bytes and construct type /// * **input** - Input given as data and bit offset /// diff --git a/src/prelude.rs b/src/prelude.rs index 2511a0c6..6a70b6d4 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -2,7 +2,8 @@ [What is a prelude?](std::prelude) */ +pub use crate::error::{DekuError, NeedSize}; pub use crate::{ - deku_derive, error::DekuError, error::NeedSize, DekuContainerRead, DekuContainerWrite, - DekuEnumExt, DekuRead, DekuUpdate, DekuWrite, + deku_derive, reader::Reader, DekuContainerRead, DekuContainerWrite, DekuEnumExt, DekuRead, + DekuReader, DekuUpdate, DekuWrite, }; diff --git a/src/reader.rs b/src/reader.rs new file mode 100644 index 00000000..6de4b35a --- /dev/null +++ b/src/reader.rs @@ -0,0 +1,275 @@ +//! 
Reader for reader functions + +use core::cmp::Ordering; + +use bitvec::prelude::*; +use no_std_io::io::{ErrorKind, Read}; + +use crate::{prelude::NeedSize, DekuError}; +use alloc::vec::Vec; + +#[cfg(feature = "logging")] +use log; + +/// Return from `read_bytes` +pub enum ReaderRet { + /// Successfully read bytes + Bytes, + /// Successfully read bits + Bits(Option>), +} + +/// Reader to use with `from_reader_with_ctx` +pub struct Reader<'a, R: Read> { + inner: &'a mut R, + /// bits stored from previous reads that didn't read to the end of a byte size + leftover: BitVec, + /// Amount of bits read during the use of [read_bits](Reader::read_bits) and [read_bytes](Reader::read_bytes). + pub bits_read: usize, +} + +/// Max bits requested from [`Reader::read_bits`] during one call +pub const MAX_BITS_AMT: usize = 128; + +impl<'a, R: Read> Reader<'a, R> { + /// Create a new `Reader` + #[inline] + pub fn new(inner: &'a mut R) -> Self { + Self { + inner, + leftover: BitVec::new(), // with_capacity 8? + bits_read: 0, + } + } + + /// Return the unused bits + /// + /// Once the parsing is complete for a struct, if the total size of the field using the `bits` attribute + /// isn't byte aligned the returned values could be unexpected as the "Read" will always read + /// to a full byte. 
+ /// + /// ```rust + /// use std::io::Cursor; + /// use deku::prelude::*; + /// + /// #[derive(Debug, PartialEq, DekuRead, DekuWrite)] + /// #[deku(endian = "big")] + /// struct DekuTest { + /// #[deku(bits = "4")] + /// field_a: u8, + /// #[deku(bits = "2")] + /// field_b: u8, + /// } + /// // | | <= this entire byte is Read + /// let data: Vec = vec![0b0110_1101, 0xbe, 0xef]; + /// let mut cursor = Cursor::new(data); + /// let mut reader = Reader::new(&mut cursor); + /// let val = DekuTest::from_reader_with_ctx(&mut reader, ()).unwrap(); + /// assert_eq!(DekuTest { + /// field_a: 0b0110, + /// field_b: 0b11, + /// }, val); + /// + /// // last 2 bits in that byte + /// assert_eq!(reader.rest(), vec![false, true]); + /// ``` + #[inline] + pub fn rest(&mut self) -> Vec { + self.leftover.iter().by_vals().collect() + } + + /// Return true if we are at the end of a reader and there are no cached bits in the reader + /// + /// The byte that was read will be internally buffered + #[inline] + pub fn end(&mut self) -> bool { + if !self.leftover.is_empty() { + #[cfg(feature = "logging")] + log::trace!("not end"); + false + } else { + let mut buf = [0; 1]; + if let Err(e) = self.inner.read_exact(&mut buf) { + if e.kind() == ErrorKind::UnexpectedEof { + #[cfg(feature = "logging")] + log::trace!("end"); + return true; + } + } + + // logic is best if we just turn this into bits right now + self.leftover = BitVec::try_from_slice(&buf).unwrap(); + #[cfg(feature = "logging")] + log::trace!("not end"); + false + } + } + + /// Used at the beginning of `from_reader`. + /// TODO: maybe send into read_bytes() if amt >= 8 + #[inline] + pub fn skip_bits(&mut self, amt: usize) -> Result<(), DekuError> { + #[cfg(feature = "logging")] + log::trace!("skip_bits: {amt}"); + // Save, and keep the leftover bits since the read will most likely be less than a byte + self.read_bits(amt)?; + + Ok(()) + } + + /// Attempt to read bits from `Reader`. 
If enough bits are already "Read", we just grab + /// enough bits to satisfy `amt`, but will also "Read" more from the stream and store the + /// leftovers if enough are not already "Read". + /// + /// # Guarantees + /// - if Some(bits), the returned `BitVec` will have the size of `amt` and + /// `self.bits_read` will increase by `amt` + /// + /// # Params + /// `amt` - Amount of bits that will be read. Must be <= [`MAX_BITS_AMT`]. + #[inline] + pub fn read_bits(&mut self, amt: usize) -> Result>, DekuError> { + #[cfg(feature = "logging")] + log::trace!("read_bits: requesting {amt} bits"); + if amt == 0 { + #[cfg(feature = "logging")] + log::trace!("read_bits: returned None"); + return Ok(None); + } + let mut ret = BitVec::new(); + + match amt.cmp(&self.leftover.len()) { + // exact match, just use leftover + Ordering::Equal => { + core::mem::swap(&mut ret, &mut self.leftover); + self.leftover.clear(); + } + // previous read was not enough to satisfy the amt requirement, return all previously + Ordering::Greater => { + // read bits + ret.extend_from_bitslice(&self.leftover); + + // calculate the amount of bytes we need to read to read enough bits + let bits_left = amt - self.leftover.len(); + let mut bytes_len = bits_left / 8; + if (bits_left % 8) != 0 { + bytes_len += 1; + } + + // read in new bytes + let mut buf = [0; MAX_BITS_AMT]; + if let Err(e) = self.inner.read_exact(&mut buf[..bytes_len]) { + if e.kind() == ErrorKind::UnexpectedEof { + return Err(DekuError::Incomplete(NeedSize::new(amt))); + } + + // TODO: other errors? 
+ } + let read_buf = &buf[..bytes_len]; + + #[cfg(feature = "logging")] + log::trace!("read_bits: read() {:02x?}", read_buf); + + // create bitslice and remove unused bits + let rest = BitSlice::try_from_slice(read_buf).unwrap(); + let (rest, not_needed) = rest.split_at(bits_left); + core::mem::swap(&mut not_needed.to_bitvec(), &mut self.leftover); + + // create return + ret.extend_from_bitslice(rest); + } + // The entire bits we need to return have been already read previously from bytes but + // not all were read, return required leftover bits + Ordering::Less => { + let used = self.leftover.split_off(amt); + ret.extend_from_bitslice(&self.leftover); + self.leftover = used; + } + } + + self.bits_read += ret.len(); + #[cfg(feature = "logging")] + log::trace!("read_bits: returning {ret}"); + Ok(Some(ret)) + } + + /// Attempt to read bytes from `Reader`. This will return `ReaderRet::Bytes` with a valid + /// `buf` of bytes if we have no "leftover" bytes and thus are byte aligned. If we are not byte + /// aligned, this will call `read_bits` and return `ReaderRet::Bits(_)` of size `amt` * 8. + /// + /// # Params + /// `amt` - Amount of bytes that will be read + #[inline] + pub fn read_bytes(&mut self, amt: usize, buf: &mut [u8]) -> Result { + #[cfg(feature = "logging")] + log::trace!("read_bytes: requesting {amt} bytes"); + if self.leftover.is_empty() { + if buf.len() < amt { + return Err(DekuError::Incomplete(NeedSize::new(amt * 8))); + } + if let Err(e) = self.inner.read_exact(&mut buf[..amt]) { + if e.kind() == ErrorKind::UnexpectedEof { + return Err(DekuError::Incomplete(NeedSize::new(amt * 8))); + } + + // TODO: other errors? 
+ } + + self.bits_read += amt * 8; + + #[cfg(feature = "logging")] + log::trace!("read_bytes: returning {buf:02x?}"); + + Ok(ReaderRet::Bytes) + } else { + Ok(ReaderRet::Bits(self.read_bits(amt * 8)?)) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use hexlit::hex; + use no_std_io::io::Cursor; + + #[test] + fn test_end() { + let input = hex!("aa"); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + assert!(!reader.end()); + let mut buf = [0; 1]; + let _ = reader.read_bytes(1, &mut buf); + assert!(reader.end()); + + let input = hex!("aa"); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + assert!(!reader.end()); + let _ = reader.read_bits(4); + assert!(!reader.end()); + let _ = reader.read_bits(4); + assert!(reader.end()); + } + + #[test] + fn test_bits_less() { + let input = hex!("aa"); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let _ = reader.read_bits(1); + let _ = reader.read_bits(4); + let _ = reader.read_bits(3); + } + + #[test] + fn test_inner() { + let input = hex!("aabbcc"); + let mut cursor = Cursor::new(input); + let mut reader = Reader::new(&mut cursor); + let mut buf = [0; 1]; + let _ = reader.read_bytes(1, &mut buf); + assert_eq!([0xaa], buf); + } +} diff --git a/tests/test_alloc.rs b/tests/test_alloc.rs index 83c8806e..5a735d11 100644 --- a/tests/test_alloc.rs +++ b/tests/test_alloc.rs @@ -38,17 +38,17 @@ struct TestDeku { field_e: Vec, // 1 alloc field_f: [u8; 3], #[deku(bits = "3")] - field_g: u8, // 1 alloc (bits read) + field_g: u8, // 3 allocs (read_bits(Ordering::Greater)) #[deku(bits = "5")] - field_h: u8, // 1 alloc (bits read) - field_i: NestedEnum2, + field_h: u8, // 1 alloc (read_bits(Ordering::Equal)) + //field_i: NestedEnum2, } mod tests { - use super::*; use alloc_counter::count_alloc; use hexlit::hex; - use std::convert::TryFrom; + + use super::*; #[test] #[cfg_attr(miri, ignore)] @@ -57,10 +57,10 @@ mod tests { 
assert_eq!( count_alloc(|| { - let _ = TestDeku::try_from(input.as_ref()).unwrap(); + let _ = TestDeku::from_reader((&mut input.as_slice(), 0)).unwrap(); }) .0, - (4, 0, 4) + (5, 0, 5) ); } } diff --git a/tests/test_attributes/test_assert.rs b/tests/test_attributes/test_assert.rs index 134a9c13..51efeff3 100644 --- a/tests/test_attributes/test_assert.rs +++ b/tests/test_attributes/test_assert.rs @@ -1,7 +1,8 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use hexlit::hex; use rstest::rstest; -use std::convert::{TryFrom, TryInto}; #[derive(Default, PartialEq, Debug, DekuRead, DekuWrite)] struct TestStruct { diff --git a/tests/test_attributes/test_assert_eq.rs b/tests/test_attributes/test_assert_eq.rs index cdab14be..6cb3ab59 100644 --- a/tests/test_attributes/test_assert_eq.rs +++ b/tests/test_attributes/test_assert_eq.rs @@ -1,7 +1,8 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use hexlit::hex; use rstest::rstest; -use std::convert::{TryFrom, TryInto}; #[derive(Default, PartialEq, Debug, DekuRead, DekuWrite)] struct TestStruct { diff --git a/tests/test_attributes/test_cond.rs b/tests/test_attributes/test_cond.rs index feac39ea..2873d836 100644 --- a/tests/test_attributes/test_cond.rs +++ b/tests/test_attributes/test_cond.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_cond_deku() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] @@ -13,7 +14,7 @@ fn test_cond_deku() { // `cond` is true let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0x01, @@ -28,7 +29,7 @@ fn test_cond_deku() { // `cond` is false let test_data: Vec = [0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( 
TestStruct { field_a: 0x02, diff --git a/tests/test_attributes/test_ctx.rs b/tests/test_attributes/test_ctx.rs index 46f0c47e..7795ff6f 100644 --- a/tests/test_attributes/test_ctx.rs +++ b/tests/test_attributes/test_ctx.rs @@ -1,7 +1,10 @@ +use std::convert::{TryFrom, TryInto}; +use std::io::Cursor; + use bitvec::bitvec; -use deku::bitvec::{BitView, Msb0}; +use deku::bitvec::Msb0; use deku::prelude::*; -use std::convert::{TryFrom, TryInto}; +use deku::reader::Reader; /// General smoke tests for ctx /// TODO: These should be divided into smaller units @@ -12,7 +15,7 @@ fn test_ctx_struct() { #[deku(ctx = "a: u8, b: u8")] struct SubTypeNeedCtx { #[deku( - reader = "(|rest|{u8::read(rest,()).map(|(slice,c)|(slice,(a+b+c) as usize))})(deku::rest)", + reader = "(u8::from_reader_with_ctx(deku::reader,()).map(|c|(a+b+c) as usize))", writer = "(|c|{u8::write(&(c-a-b), deku::output, ())})(self.i as u8)" )] i: usize, @@ -28,7 +31,7 @@ fn test_ctx_struct() { let test_data = [0x01_u8, 0x02, 0x03]; - let ret_read = FieldLevelCtxStruct::try_from(&test_data[..]).unwrap(); + let ret_read = FieldLevelCtxStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( ret_read, FieldLevelCtxStruct { @@ -52,7 +55,7 @@ fn test_top_level_ctx_enum() { #[deku(id = "1")] VariantA( #[deku( - reader = "(|rest|{u8::read(rest,()).map(|(slice,c)|(slice,(a+b+c)))})(deku::rest)", + reader = "(u8::from_reader_with_ctx(deku::reader,()).map(|c|(a+b+c)))", writer = "(|c|{u8::write(&(c-a-b), deku::output, ())})(field_0)" )] u8, @@ -60,8 +63,11 @@ fn test_top_level_ctx_enum() { } let test_data = [0x01_u8, 0x03]; - let (rest, ret_read) = TopLevelCtxEnum::read(test_data.view_bits(), (1, 2)).unwrap(); - assert!(rest.is_empty()); + let ret_read = TopLevelCtxEnum::from_reader_with_ctx( + &mut Reader::new(&mut Cursor::new(test_data)), + (1, 2), + ) + .unwrap(); assert_eq!(ret_read, TopLevelCtxEnum::VariantA(0x06)); let mut ret_write = bitvec![u8, Msb0;]; @@ -77,7 +83,7 @@ fn test_top_level_ctx_enum_default() { 
#[deku(id = "1")] VariantA( #[deku( - reader = "(|rest|{u8::read(rest,()).map(|(slice,c)|(slice,(a+b+c)))})(deku::rest)", + reader = "(u8::from_reader_with_ctx(deku::reader, ()).map(|c|(a+b+c)))", writer = "(|c|{u8::write(&(c-a-b), deku::output, ())})(field_0)" )] u8, @@ -88,14 +94,17 @@ fn test_top_level_ctx_enum_default() { let test_data = [0x01_u8, 0x03]; // Use default - let ret_read = TopLevelCtxEnumDefault::try_from(test_data.as_ref()).unwrap(); + let ret_read = TopLevelCtxEnumDefault::try_from(test_data.as_slice()).unwrap(); assert_eq!(expected, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(test_data.to_vec(), ret_write); // Use context - let (rest, ret_read) = TopLevelCtxEnumDefault::read(test_data.view_bits(), (1, 2)).unwrap(); - assert!(rest.is_empty()); + let ret_read = TopLevelCtxEnumDefault::from_reader_with_ctx( + &mut Reader::new(&mut Cursor::new(test_data)), + (1, 2), + ) + .unwrap(); assert_eq!(ret_read, TopLevelCtxEnumDefault::VariantA(0x06)); let mut ret_write = bitvec![u8, Msb0;]; ret_read.write(&mut ret_write, (1, 2)).unwrap(); @@ -137,7 +146,7 @@ fn test_struct_enum_ctx_id() { // VarA let test_data = [0x01_u8, 0x01, 0xab, 0xab]; - let ret_read = StructEnumId::try_from(test_data.as_ref()).unwrap(); + let ret_read = StructEnumId::try_from(test_data.as_slice()).unwrap(); assert_eq!( StructEnumId { @@ -154,7 +163,7 @@ fn test_struct_enum_ctx_id() { // VarB let test_data = [0x02_u8, 0x02]; - let ret_read = StructEnumId::try_from(test_data.as_ref()).unwrap(); + let ret_read = StructEnumId::try_from(test_data.as_slice()).unwrap(); assert_eq!( StructEnumId { @@ -171,7 +180,7 @@ fn test_struct_enum_ctx_id() { // VarC let test_data = [0x02_u8, 0x03, 0xcc]; - let (_, ret_read) = StructEnumId::from_bytes((test_data.as_ref(), 0)).unwrap(); + let (_, ret_read) = StructEnumId::from_reader((&mut test_data.as_slice(), 0)).unwrap(); assert_eq!( StructEnumId { @@ -206,14 +215,17 @@ fn test_ctx_default_struct() { let test_data = 
[0xffu8]; // Use default - let ret_read = TopLevelCtxStructDefault::try_from(test_data.as_ref()).unwrap(); + let ret_read = TopLevelCtxStructDefault::try_from(test_data.as_slice()).unwrap(); assert_eq!(expected, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(ret_write, test_data); // Use context - let (rest, ret_read) = TopLevelCtxStructDefault::read(test_data.view_bits(), (1, 2)).unwrap(); - assert!(rest.is_empty()); + let ret_read = TopLevelCtxStructDefault::from_reader_with_ctx( + &mut Reader::new(&mut Cursor::new(test_data)), + (1, 2), + ) + .unwrap(); assert_eq!(expected, ret_read); let mut ret_write = bitvec![u8, Msb0;]; ret_read.write(&mut ret_write, (1, 2)).unwrap(); @@ -236,11 +248,11 @@ fn test_enum_endian_ctx() { } let test_data = [0xdeu8, 0xad, 0xbe, 0xef, 0xff]; - let ret_read = EnumTypeEndian::try_from(test_data.as_ref()).unwrap(); + let ret_read = EnumTypeEndian::try_from(test_data.as_slice()).unwrap(); assert_eq!( EnumTypeEndian { - t: EnumTypeEndianCtx::VarA(0xFF) + t: EnumTypeEndianCtx::VarA(0xff) }, ret_read ); diff --git a/tests/test_attributes/test_limits/test_bits_read.rs b/tests/test_attributes/test_limits/test_bits_read.rs index a80bbcf6..c31e6de2 100644 --- a/tests/test_attributes/test_limits/test_bits_read.rs +++ b/tests/test_attributes/test_limits/test_bits_read.rs @@ -1,6 +1,7 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use rstest::rstest; -use std::convert::{TryFrom, TryInto}; mod test_slice { use super::*; @@ -8,17 +9,17 @@ mod test_slice { #[test] fn test_bits_read_static() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(bits_read = "16")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: 
test_data.as_ref() + data: test_data.to_vec() }, ret_read ); @@ -38,20 +39,20 @@ mod test_slice { )] fn test_bits_read_from_field(input_bits: u8) { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { bits: u8, #[deku(bits_read = "bits")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [input_bits, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [input_bits, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { bits: 16, - data: &test_data[1..] + data: test_data[1..].to_vec(), }, ret_read ); @@ -63,9 +64,9 @@ mod test_slice { #[test] fn test_bits_read_zero() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(bits_read = "0")] - data: &'a [u8], + data: Vec, } let test_data: Vec = [].to_vec(); @@ -73,7 +74,7 @@ mod test_slice { let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.clone() }, ret_read ); @@ -94,14 +95,14 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { // We should read 16 bits, not 16 elements, // thus resulting in a single u16 element - data: vec![0xBBAA] + data: vec![0xbbaa] }, ret_read ); @@ -128,16 +129,16 @@ mod test_vec { data: Vec, } - let test_data: Vec = [input_bits, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [input_bits, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { bits: 16, // We should read 16 bits, not 16 elements, // thus resulting in a single u16 
element - data: vec![0xBBAA] + data: vec![0xbbaa] }, ret_read ); diff --git a/tests/test_attributes/test_limits/test_bytes_read.rs b/tests/test_attributes/test_limits/test_bytes_read.rs index 28287d34..bd7dc296 100644 --- a/tests/test_attributes/test_limits/test_bytes_read.rs +++ b/tests/test_attributes/test_limits/test_bytes_read.rs @@ -1,6 +1,7 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use rstest::rstest; -use std::convert::{TryFrom, TryInto}; mod test_slice { use super::*; @@ -8,17 +9,17 @@ mod test_slice { #[test] fn test_bytes_read_static() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(bytes_read = "2")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.to_vec(), }, ret_read ); @@ -35,20 +36,20 @@ mod test_slice { )] fn test_bytes_read_from_field(input_bytes: u8) { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { bytes: u8, #[deku(bytes_read = "bytes")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [input_bytes, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [input_bytes, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { bytes: 0x02, - data: &test_data[1..] 
+ data: test_data[1..].to_vec() }, ret_read ); @@ -60,17 +61,17 @@ mod test_slice { #[test] fn test_bytes_read_zero() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(bytes_read = "0")] - data: &'a [u8], + data: Vec, } let test_data: Vec = [].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.clone() }, ret_read ); @@ -91,14 +92,14 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { // We should read two bytes, not two elements, // thus resulting in a single u16 element - data: vec![0xBBAA] + data: vec![0xbbaa] }, ret_read ); @@ -122,16 +123,16 @@ mod test_vec { data: Vec, } - let test_data: Vec = [input_bytes, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [input_bytes, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { bytes: 0x02, // We should read two bytes, not two elements, // thus resulting in a single u16 element - data: vec![0xBBAA] + data: vec![0xbbaa] }, ret_read ); diff --git a/tests/test_attributes/test_limits/test_count.rs b/tests/test_attributes/test_limits/test_count.rs index 790ce528..c2e78712 100644 --- a/tests/test_attributes/test_limits/test_count.rs +++ b/tests/test_attributes/test_limits/test_count.rs @@ -1,23 +1,24 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + mod test_slice { use super::*; #[test] fn test_count_static() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { 
#[deku(count = "2")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.to_vec() }, ret_read ); @@ -29,19 +30,19 @@ mod test_slice { #[test] fn test_count_from_field() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { count: u8, #[deku(count = "count")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0x02, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0x02, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { count: 0x02, - data: &test_data[1..] + data: test_data[1..].to_vec(), }, ret_read ); @@ -53,9 +54,9 @@ mod test_slice { #[test] fn test_count_zero() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(count = "0")] - data: &'a [u8], + data: Vec, } let test_data: Vec = [].to_vec(); @@ -63,7 +64,7 @@ mod test_slice { let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.clone() }, ret_read ); @@ -76,15 +77,15 @@ mod test_slice { #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] fn test_count_error() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { count: u8, #[deku(count = "count")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0x03, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0x03, 0xaa, 0xbb].to_vec(); - let _ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); } } @@ -99,12 +100,12 @@ mod test_vec { data: Vec, 
} - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: vec![0xAA, 0xBB] + data: vec![0xaa, 0xbb] }, ret_read ); @@ -122,13 +123,13 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0x02, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0x02, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { count: 0x02, - data: vec![0xAA, 0xBB] + data: vec![0xaa, 0xbb] }, ret_read ); @@ -164,8 +165,8 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0x03, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0x03, 0xaa, 0xbb].to_vec(); - let _ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); } } diff --git a/tests/test_attributes/test_limits/test_until.rs b/tests/test_attributes/test_limits/test_until.rs index f88a3d5e..dfbb929c 100644 --- a/tests/test_attributes/test_limits/test_until.rs +++ b/tests/test_attributes/test_limits/test_until.rs @@ -1,23 +1,24 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + mod test_slice { use super::*; #[test] fn test_until_static() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { #[deku(until = "|v: &u8| *v == 0xBB")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: test_data.as_ref() + data: test_data.to_vec() }, ret_read ); @@ -29,20 +30,20 @@ mod test_slice { #[test] fn test_until_from_field() { 
#[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { until: u8, #[deku(until = "|v: &u8| *v == *until")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xBB, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xbb, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - until: 0xBB, - data: &test_data[1..] + until: 0xbb, + data: test_data[1..].to_vec() }, ret_read ); @@ -55,16 +56,16 @@ mod test_slice { #[should_panic(expected = "Incomplete(NeedSize { bits: 8 })")] fn test_until_error() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { + struct TestStruct { until: u8, #[deku(until = "|v: &u8| *v == *until")] - data: &'a [u8], + data: Vec, } - let test_data: Vec = [0xCC, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xcc, 0xaa, 0xbb].to_vec(); - let _ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); } } @@ -79,12 +80,12 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - data: vec![0xAA, 0xBB] + data: vec![0xaa, 0xbb] }, ret_read ); @@ -103,13 +104,13 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0xBB, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0xbb, 0xaa, 0xbb].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - until: 0xBB, - data: vec![0xAA, 0xBB] + until: 0xbb, + data: vec![0xaa, 0xbb] }, ret_read ); @@ -129,8 +130,8 @@ mod test_vec { data: Vec, } - let test_data: Vec = [0xCC, 0xAA, 0xBB].to_vec(); + let 
test_data: Vec = [0xcc, 0xaa, 0xbb].to_vec(); - let _ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); } } diff --git a/tests/test_attributes/test_map.rs b/tests/test_attributes/test_map.rs index 90329e59..b639f1b5 100644 --- a/tests/test_attributes/test_map.rs +++ b/tests/test_attributes/test_map.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::TryFrom; +use deku::prelude::*; + #[test] fn test_map() { #[derive(PartialEq, Debug, DekuRead)] @@ -19,7 +20,7 @@ fn test_map() { let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: "1".to_string(), diff --git a/tests/test_attributes/test_padding/mod.rs b/tests/test_attributes/test_padding/mod.rs index a54bd259..5c1d5e7a 100644 --- a/tests/test_attributes/test_padding/mod.rs +++ b/tests/test_attributes/test_padding/mod.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + mod test_pad_bits_after; mod test_pad_bits_before; mod test_pad_bytes_after; @@ -17,20 +18,20 @@ fn test_pad_bits_before_and_pad_bytes_before() { field_b: u8, } - let data: Vec = vec![0b10_000000, 0xAA, 0xBB]; + let data: Vec = vec![0b10_000000, 0xaa, 0xbb]; - let ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0b10, - field_b: 0xBB, + field_b: 0xbb, }, ret_read ); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(vec![0b10_000000, 0x00, 0xBB], ret_write); + assert_eq!(vec![0b10_000000, 0x00, 0xbb], ret_write); } #[test] @@ -42,18 +43,18 @@ fn test_pad_bits_after_and_pad_bytes_after() { field_b: u8, } - let data: Vec = vec![0b10_000000, 0xAA, 0xBB]; + let data: Vec = vec![0b10_000000, 0xaa, 0xbb]; - let ret_read = 
TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0b10, - field_b: 0xBB, + field_b: 0xbb, }, ret_read ); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(vec![0b10_000000, 0x00, 0xBB], ret_write); + assert_eq!(vec![0b10_000000, 0x00, 0xbb], ret_write); } diff --git a/tests/test_attributes/test_padding/test_pad_bits_after.rs b/tests/test_attributes/test_padding/test_pad_bits_after.rs index cea1887a..254e8ae2 100644 --- a/tests/test_attributes/test_padding/test_pad_bits_after.rs +++ b/tests/test_attributes/test_padding/test_pad_bits_after.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_pad_bits_after() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] @@ -13,7 +14,7 @@ fn test_pad_bits_after() { let data: Vec = vec![0b10_0110_01]; - let ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { @@ -40,7 +41,7 @@ fn test_pad_bits_after_not_enough() { let data: Vec = vec![0b10_0110_01]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -58,7 +59,7 @@ fn test_pad_bits_after_read_err() { let data: Vec = vec![0b10_01_1001]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] diff --git a/tests/test_attributes/test_padding/test_pad_bits_before.rs b/tests/test_attributes/test_padding/test_pad_bits_before.rs index 9c872aa7..68bf59cf 100644 --- a/tests/test_attributes/test_padding/test_pad_bits_before.rs +++ b/tests/test_attributes/test_padding/test_pad_bits_before.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_pad_bits_before() { #[derive(PartialEq, Debug, 
DekuRead, DekuWrite)] @@ -13,7 +14,7 @@ fn test_pad_bits_before() { let data: Vec = vec![0b10_01_1001]; - let ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { @@ -40,7 +41,7 @@ fn test_pad_bits_before_not_enough() { let data: Vec = vec![0b10_01_1001]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -58,7 +59,7 @@ fn test_pad_bits_before_read_err() { let data: Vec = vec![0b10_01_1001]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] diff --git a/tests/test_attributes/test_padding/test_pad_bytes_after.rs b/tests/test_attributes/test_padding/test_pad_bytes_after.rs index 787eb60d..846c61c7 100644 --- a/tests/test_attributes/test_padding/test_pad_bytes_after.rs +++ b/tests/test_attributes/test_padding/test_pad_bytes_after.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_pad_bytes_after() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] @@ -10,20 +11,20 @@ fn test_pad_bytes_after() { field_b: u8, } - let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; + let data: Vec = vec![0xaa, 0xbb, 0xcc, 0xdd]; - let ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { - field_a: 0xAA, - field_b: 0xDD, + field_a: 0xaa, + field_b: 0xdd, }, ret_read ); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(vec![0xAA, 0x00, 0x00, 0xDD], ret_write); + assert_eq!(vec![0xaa, 0x00, 0x00, 0xdd], ret_write); } #[test] @@ -36,9 +37,9 @@ fn test_pad_bytes_after_not_enough() { field_b: u8, } - let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; + let data: Vec = vec![0xaa, 0xbb, 0xcc, 0xdd]; - let _ret_read = 
TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -53,9 +54,9 @@ fn test_pad_bytes_after_read_err() { field_b: u8, } - let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; + let data: Vec = vec![0xaa, 0xbb, 0xcc, 0xdd]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -71,8 +72,8 @@ fn test_pad_bytes_after_write_err() { } let data = TestStruct { - field_a: 0xAA, - field_b: 0xDD, + field_a: 0xaa, + field_b: 0xdd, }; let _ret_write: Vec = data.try_into().unwrap(); diff --git a/tests/test_attributes/test_padding/test_pad_bytes_before.rs b/tests/test_attributes/test_padding/test_pad_bytes_before.rs index 7d53d67a..f9a92e39 100644 --- a/tests/test_attributes/test_padding/test_pad_bytes_before.rs +++ b/tests/test_attributes/test_padding/test_pad_bytes_before.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_pad_bytes_before() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] @@ -10,20 +11,20 @@ fn test_pad_bytes_before() { field_b: u8, } - let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; + let data: Vec = vec![0xaa, 0xbb, 0xcc, 0xdd]; - let ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(data.as_slice()).unwrap(); assert_eq!( TestStruct { - field_a: 0xAA, - field_b: 0xDD, + field_a: 0xaa, + field_b: 0xdd, }, ret_read ); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(vec![0xAA, 0x00, 0x00, 0xDD], ret_write); + assert_eq!(vec![0xaa, 0x00, 0x00, 0xdd], ret_write); } #[test] @@ -36,9 +37,9 @@ fn test_pad_bytes_before_not_enough() { field_b: u8, } - let data: Vec = vec![0xAA]; + let data: Vec = vec![0xaa]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -53,9 +54,9 @@ fn 
test_pad_bytes_before_read_err() { field_b: u8, } - let data: Vec = vec![0xAA, 0xBB, 0xCC, 0xDD]; + let data: Vec = vec![0xaa, 0xbb, 0xcc, 0xdd]; - let _ret_read = TestStruct::try_from(data.as_ref()).unwrap(); + let _ret_read = TestStruct::try_from(data.as_slice()).unwrap(); } #[test] @@ -71,8 +72,8 @@ fn test_pad_bytes_before_write_err() { } let data = TestStruct { - field_a: 0xAA, - field_b: 0xDD, + field_a: 0xaa, + field_b: 0xdd, }; let _ret_write: Vec = data.try_into().unwrap(); diff --git a/tests/test_attributes/test_skip.rs b/tests/test_attributes/test_skip.rs index a554e9ac..9a156d54 100644 --- a/tests/test_attributes/test_skip.rs +++ b/tests/test_attributes/test_skip.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + /// Skip #[test] fn test_skip() { @@ -15,7 +16,7 @@ fn test_skip() { // Skip `field_b` let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0x01, @@ -43,7 +44,7 @@ fn test_skip_default() { // Skip `field_b` and default it's value to 5 let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0x01, @@ -70,7 +71,7 @@ fn test_skip_cond() { // if `cond` is true, skip and default `field_b` to 5 let test_data: Vec = [0x01].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_a: 0x01, @@ -85,7 +86,7 @@ fn test_skip_cond() { // if `cond` is false, read `field_b` from input let test_data: Vec = [0x02, 0x03].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); 
assert_eq!( TestStruct { field_a: 0x02, diff --git a/tests/test_attributes/test_temp.rs b/tests/test_attributes/test_temp.rs index 3893ad96..2a0cb44a 100644 --- a/tests/test_attributes/test_temp.rs +++ b/tests/test_attributes/test_temp.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_temp_field_write() { #[deku_derive(DekuRead, DekuWrite)] @@ -34,7 +35,7 @@ fn test_temp_field_value_ignore_on_read() { let test_data: Vec = [0x02, 0x02, 0x03].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_b: vec![0x02, 0x03] @@ -56,7 +57,7 @@ fn test_temp_field() { let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { field_b: vec![0x02] @@ -76,7 +77,7 @@ fn test_temp_field_unnamed() { let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!(TestStruct(vec![0x02]), ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); @@ -114,9 +115,9 @@ fn test_temp_enum_field() { }, } - let test_data: Vec = [0xAB, 0x01, 0x02].to_vec(); + let test_data: Vec = [0xab, 0x01, 0x02].to_vec(); - let ret_read = TestEnum::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestEnum::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestEnum::VarA { field_b: vec![0x02] @@ -125,7 +126,7 @@ fn test_temp_enum_field() { ); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(vec![0xAB, 0x02], ret_write); + assert_eq!(vec![0xab, 0x02], ret_write); } #[test] @@ -148,7 +149,7 @@ fn test_temp_enum_field_write() { VarB(u8), } - let test_data: Vec = [0xAB, 0x01, 0x02].to_vec(); + let test_data: Vec = [0xab, 
0x01, 0x02].to_vec(); let ret_write: Vec = TestEnum::VarA { field_b: vec![0x02], } @@ -156,7 +157,7 @@ fn test_temp_enum_field_write() { .unwrap(); assert_eq!(test_data, ret_write); - let test_data: Vec = [0xBA, 0x10].to_vec(); + let test_data: Vec = [0xba, 0x10].to_vec(); let ret_write: Vec = TestEnum::VarB(0x10).to_bytes().unwrap(); assert_eq!(test_data, ret_write); } diff --git a/tests/test_attributes/test_update.rs b/tests/test_attributes/test_update.rs index 19dcb69d..c8feec49 100644 --- a/tests/test_attributes/test_update.rs +++ b/tests/test_attributes/test_update.rs @@ -1,6 +1,7 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + /// Update field value #[test] fn test_update() { @@ -13,7 +14,7 @@ fn test_update() { // Update `field_a` to 5 let test_data: Vec = [0x01].to_vec(); - let mut ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let mut ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!(TestStruct { field_a: 0x01 }, ret_read); // `field_a` field should now be increased @@ -36,20 +37,20 @@ fn test_update_from_field() { } // Update the value of `count` to the length of `data` - let test_data: Vec = [0x02, 0xAA, 0xBB].to_vec(); + let test_data: Vec = [0x02, 0xaa, 0xbb].to_vec(); // Read - let mut ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let mut ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { count: 0x02, - data: vec![0xAA, 0xBB] + data: vec![0xaa, 0xbb] }, ret_read ); // Add an item to the vec - ret_read.data.push(0xFF); + ret_read.data.push(0xff); // `count` field should now be increased ret_read.update().unwrap(); @@ -57,7 +58,7 @@ fn test_update_from_field() { // Write let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!([0x03, 0xAA, 0xBB, 0xFF].to_vec(), ret_write); + assert_eq!([0x03, 0xaa, 0xbb, 0xff].to_vec(), ret_write); } /// Update error diff --git a/tests/test_catch_all.rs 
b/tests/test_catch_all.rs index a068f86e..1565cf01 100644 --- a/tests/test_catch_all.rs +++ b/tests/test_catch_all.rs @@ -1,8 +1,8 @@ #[cfg(test)] mod test { + use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; - use std::convert::TryFrom; - use std::convert::TryInto; /// Basic test struct #[derive(Clone, Copy, PartialEq, Eq, Debug, DekuWrite, DekuRead)] @@ -38,8 +38,8 @@ mod test { #[test] fn test_basic_a() { - let input = [0u8]; - let ret_read = BasicMapping::try_from(input.as_slice()).unwrap(); + let input: &[u8] = &[0u8]; + let ret_read = BasicMapping::try_from(input).unwrap(); assert_eq!(BasicMapping::A, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(input.to_vec(), ret_write); @@ -47,8 +47,8 @@ mod test { #[test] fn test_basic_c() { - let input = [2u8]; - let ret_read = BasicMapping::try_from(input.as_slice()).unwrap(); + let input: &[u8] = &[2u8]; + let ret_read = BasicMapping::try_from(input).unwrap(); assert_eq!(BasicMapping::C, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(input.to_vec(), ret_write); @@ -56,9 +56,9 @@ mod test { #[test] fn test_basic_pattern() { - let input = [10u8]; + let input: &[u8] = &[10u8]; let output = [BasicMapping::C as u8]; - let ret_read = BasicMapping::try_from(input.as_slice()).unwrap(); + let ret_read = BasicMapping::try_from(input).unwrap(); assert_eq!(BasicMapping::C, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(output.to_vec(), ret_write); @@ -66,9 +66,9 @@ mod test { #[test] fn test_advanced_remapping() { - let input = [1u8]; + let input: &[u8] = &[1u8]; let output = [1u8]; - let ret_read = AdvancedRemapping::try_from(input.as_slice()).unwrap(); + let ret_read = AdvancedRemapping::try_from(input).unwrap(); assert_eq!(AdvancedRemapping::A, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(output.to_vec(), ret_write); @@ -76,9 +76,9 @@ mod test { #[test] fn test_advanced_remapping_default_field() { - let input 
= [10u8]; + let input: &[u8] = &[10u8]; let output = [3u8]; - let ret_read = AdvancedRemapping::try_from(input.as_slice()).unwrap(); + let ret_read = AdvancedRemapping::try_from(input).unwrap(); assert_eq!(AdvancedRemapping::C, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(output.to_vec(), ret_write); diff --git a/tests/test_compile/cases/internal_variables.rs b/tests/test_compile/cases/internal_variables.rs index 5a78923a..0108c685 100644 --- a/tests/test_compile/cases/internal_variables.rs +++ b/tests/test_compile/cases/internal_variables.rs @@ -1,97 +1,93 @@ +use deku::bitvec::{BitVec, Msb0}; use deku::prelude::*; -use deku::bitvec::{BitVec, BitSlice, Msb0}; #[derive(DekuRead, DekuWrite)] struct TestCount { field_a: u8, #[deku(count = "deku::byte_offset")] - field_b: Vec + field_b: Vec, } #[derive(DekuRead, DekuWrite)] struct TestBitRead { field_a: u8, #[deku(bits_read = "deku::bit_offset")] - field_b: Vec + field_b: Vec, } #[derive(DekuRead, DekuWrite)] struct TestBytesRead { field_a: u8, #[deku(bytes_read = "deku::bit_offset")] - field_b: Vec + field_b: Vec, } #[derive(DekuRead, DekuWrite)] struct TestUntil { field_a: u8, #[deku(until = "|v| *v as usize == deku::bit_offset")] - field_b: Vec + field_b: Vec, } #[derive(DekuRead, DekuWrite)] struct TestCond { field_a: u8, #[deku(cond = "deku::bit_offset == *field_a as usize")] - field_b: u8 + field_b: u8, } #[derive(DekuRead, DekuWrite)] struct TestDefault { field_a: u8, #[deku(skip, default = "deku::byte_offset")] - field_b: usize + field_b: usize, } #[derive(DekuRead, DekuWrite)] struct TestMap { field_a: u8, #[deku(map = "|v: u8| -> Result<_, DekuError> { Ok(v as usize + deku::byte_offset) }")] - field_b: usize + field_b: usize, } -fn dummy_reader( +fn dummy_reader( offset: usize, - rest: &BitSlice, -) -> Result<(&BitSlice, usize), DekuError> { - Ok((rest, offset)) + _reader: &mut Reader, +) -> Result { + Ok(0) } #[derive(DekuRead, DekuWrite)] struct TestReader { field_a: u8, - 
#[deku(reader = "dummy_reader(deku::byte_offset, deku::rest)")] - field_b: usize + #[deku(reader = "dummy_reader(deku::byte_offset, deku::reader)")] + field_b: usize, } #[derive(DekuRead, DekuWrite)] #[deku(ctx = "_byte_size: usize, _bit_size: usize")] -struct ChildCtx { -} +struct ChildCtx {} #[derive(DekuRead, DekuWrite)] struct TestCtx { field_a: u8, #[deku(ctx = "deku::byte_offset, deku::bit_offset")] - field_b: ChildCtx + field_b: ChildCtx, } -fn dummy_writer( - _offset: usize, - _output: &mut BitVec, -) -> Result<(), DekuError> { +fn dummy_writer(_offset: usize, _output: &mut BitVec) -> Result<(), DekuError> { Ok(()) } #[derive(DekuRead, DekuWrite)] struct TestWriter { field_a: u8, #[deku(writer = "dummy_writer(deku::byte_offset, deku::output)")] - field_b: usize + field_b: usize, } #[derive(DekuRead, DekuWrite)] struct FailInternal { field_a: u8, #[deku(cond = "__deku_bit_offset == *field_a as usize")] - field_b: u8 + field_b: u8, } fn main() {} diff --git a/tests/test_compile/cases/internal_variables.stderr b/tests/test_compile/cases/internal_variables.stderr index 5460e6c1..891a145d 100644 --- a/tests/test_compile/cases/internal_variables.stderr +++ b/tests/test_compile/cases/internal_variables.stderr @@ -1,5 +1,5 @@ error: Unexpected meta-item format `attribute cannot contain `__deku_` these are internal variables. 
Please use the `deku::` instead.` - --> $DIR/internal_variables.rs:93:19 + --> tests/test_compile/cases/internal_variables.rs:89:19 | -93 | #[deku(cond = "__deku_bit_offset == *field_a as usize")] +89 | #[deku(cond = "__deku_bit_offset == *field_a as usize")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/test_compile/cases/unknown_endian.stderr b/tests/test_compile/cases/unknown_endian.stderr index f6277a95..bfb919e8 100644 --- a/tests/test_compile/cases/unknown_endian.stderr +++ b/tests/test_compile/cases/unknown_endian.stderr @@ -1,5 +1,5 @@ error[E0425]: cannot find value `variable` in this scope - --> $DIR/unknown_endian.rs:3:10 + --> tests/test_compile/cases/unknown_endian.rs:3:10 | 3 | #[derive(DekuRead)] | ^^^^^^^^ not found in this scope @@ -7,7 +7,7 @@ error[E0425]: cannot find value `variable` in this scope = note: this error originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0425]: cannot find value `variable` in this scope - --> $DIR/unknown_endian.rs:9:10 + --> tests/test_compile/cases/unknown_endian.rs:9:10 | 9 | #[derive(DekuRead)] | ^^^^^^^^ not found in this scope @@ -15,7 +15,7 @@ error[E0425]: cannot find value `variable` in this scope = note: this error originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0425]: cannot find value `variable` in this scope - --> $DIR/unknown_endian.rs:15:10 + --> tests/test_compile/cases/unknown_endian.rs:15:10 | 15 | #[derive(DekuRead)] | ^^^^^^^^ not found in this scope @@ -23,27 +23,15 @@ error[E0425]: cannot find value `variable` in this scope = note: this error originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0425]: cannot find value `variable` in this scope - --> $DIR/unknown_endian.rs:19:10 + --> tests/test_compile/cases/unknown_endian.rs:19:10 | 19 | #[derive(DekuRead)] | ^^^^^^^^ not found in this scope | = 
note: this error originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) -warning: unreachable statement - --> $DIR/unknown_endian.rs:15:10 - | -15 | #[derive(DekuRead)] - | ^^^^^^^^ - | | - | unreachable statement - | any code following this `match` expression is unreachable, as all arms diverge - | - = note: `#[warn(unreachable_code)]` on by default - = note: this warning originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) - warning: unreachable expression - --> $DIR/unknown_endian.rs:15:10 + --> tests/test_compile/cases/unknown_endian.rs:15:10 | 15 | #[derive(DekuRead)] | ^^^^^^^^ @@ -51,4 +39,5 @@ warning: unreachable expression | unreachable expression | any code following this `match` expression is unreachable, as all arms diverge | + = note: `#[warn(unreachable_code)]` on by default = note: this warning originates in the derive macro `DekuRead` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/tests/test_enum.rs b/tests/test_enum.rs index f652f298..a3ec4d71 100644 --- a/tests/test_enum.rs +++ b/tests/test_enum.rs @@ -1,7 +1,8 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use hexlit::hex; use rstest::*; -use std::convert::{TryFrom, TryInto}; /// General smoke tests for enums /// TODO: These should be divided into smaller tests @@ -41,11 +42,12 @@ enum TestEnum { case(&hex!("FFFFFF"), TestEnum::VarA(0xFF)), )] fn test_enum(input: &[u8], expected: TestEnum) { - let ret_read = TestEnum::try_from(input).unwrap(); + let input = input.to_vec(); + let ret_read = TestEnum::try_from(input.as_slice()).unwrap(); assert_eq!(expected, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(input.to_vec(), ret_write); + assert_eq!(input, ret_write); } #[test] @@ -58,8 +60,8 @@ fn test_enum_error() { VarA(u8), } - let test_data: Vec = [0x02, 0x02].to_vec(); - let _ret_read = 
TestEnum::try_from(test_data.as_ref()).unwrap(); + let test_data = &mut [0x02, 0x02]; + let _ret_read = TestEnum::try_from(test_data.as_slice()).unwrap(); } #[derive(PartialEq, Debug, DekuRead, DekuWrite)] @@ -79,11 +81,12 @@ enum TestEnumDiscriminant { case(&hex!("03"), TestEnumDiscriminant::VarA), )] fn test_enum_discriminant(input: &[u8], expected: TestEnumDiscriminant) { - let ret_read = TestEnumDiscriminant::try_from(input).unwrap(); + let input = input.to_vec(); + let ret_read = TestEnumDiscriminant::try_from(input.as_slice()).unwrap(); assert_eq!(expected, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(input.to_vec(), ret_write); + assert_eq!(input, ret_write); } #[test] @@ -97,11 +100,55 @@ fn test_enum_array_type() { VarB, } - let input = b"123".as_ref(); + let input = b"123".to_vec(); - let ret_read = TestEnumArray::try_from(input).unwrap(); + let ret_read = TestEnumArray::try_from(input.as_slice()).unwrap(); assert_eq!(TestEnumArray::VarA, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(input.to_vec(), ret_write); } + +#[test] +fn test_id_pat_with_id() { + // In these tests, the id_pat is already stored in the previous read to `my_id`, so we don't + // use that for the next reading... 
+ + #[derive(PartialEq, Debug, DekuRead, DekuWrite)] + pub struct DekuTest { + my_id: u8, + #[deku(ctx = "*my_id")] + enum_from_id: MyEnum, + } + + #[derive(PartialEq, Debug, DekuRead, DekuWrite)] + #[deku(ctx = "my_id: u8", id = "my_id")] + pub enum MyEnum { + #[deku(id_pat = "1..=2")] + VariantA(u8), + #[deku(id_pat = "_")] + VariantB, + } + + let input = [0x01, 0x02]; + let (_, v) = DekuTest::from_reader((&mut input.as_slice(), 0)).unwrap(); + assert_eq!( + v, + DekuTest { + my_id: 0x01, + enum_from_id: MyEnum::VariantA(2) + } + ); + assert_eq!(input, &*v.to_bytes().unwrap()); + + let input = [0x05]; + let (_, v) = DekuTest::from_reader((&mut input.as_slice(), 0)).unwrap(); + assert_eq!( + v, + DekuTest { + my_id: 0x05, + enum_from_id: MyEnum::VariantB + } + ); + assert_eq!(input, &*v.to_bytes().unwrap()); +} diff --git a/tests/test_from_reader.rs b/tests/test_from_reader.rs new file mode 100644 index 00000000..537a6b99 --- /dev/null +++ b/tests/test_from_reader.rs @@ -0,0 +1,59 @@ +use deku::prelude::*; +use no_std_io::io::Seek; + +#[test] +fn test_from_reader_struct() { + #[derive(Debug, PartialEq, DekuRead, DekuWrite)] + struct TestDeku(#[deku(bits = 4)] u8); + + let test_data: Vec = [0b0110_0110u8, 0b0101_1010u8].to_vec(); + let mut c = std::io::Cursor::new(test_data); + + c.rewind().unwrap(); + let (amt_read, ret_read) = TestDeku::from_reader((&mut c, 0)).unwrap(); + assert_eq!(amt_read, 4); + let mut total_read = amt_read; + assert_eq!(TestDeku(0b0110), ret_read); + + c.rewind().unwrap(); + let (amt_read, ret_read) = TestDeku::from_reader((&mut c, total_read)).unwrap(); + assert_eq!(amt_read, 8); + total_read = amt_read; + assert_eq!(TestDeku(0b0110), ret_read); + + env_logger::init(); + c.rewind().unwrap(); + let (amt_read, ret_read) = TestDeku::from_reader((&mut c, total_read)).unwrap(); + assert_eq!(amt_read, 12); + total_read = amt_read; + assert_eq!(TestDeku(0b0101), ret_read); + + c.rewind().unwrap(); + let (amt_read, ret_read) = 
TestDeku::from_reader((&mut c, total_read)).unwrap(); + assert_eq!(amt_read, 16); + assert_eq!(TestDeku(0b1010), ret_read); +} + +#[test] +fn test_from_reader_enum() { + #[derive(Debug, PartialEq, DekuRead, DekuWrite)] + #[deku(type = "u8", bits = "4")] + enum TestDeku { + #[deku(id = "0b0110")] + VariantA(#[deku(bits = "4")] u8), + #[deku(id = "0b0101")] + VariantB(#[deku(bits = "2")] u8), + } + + let test_data = [0b0110_0110u8, 0b0101_1010u8]; + let mut c = std::io::Cursor::new(test_data); + + let (first_amt_read, ret_read) = TestDeku::from_reader((&mut c, 0)).unwrap(); + assert_eq!(first_amt_read, 8); + assert_eq!(TestDeku::VariantA(0b0110), ret_read); + c.rewind().unwrap(); + + let (amt_read, ret_read) = TestDeku::from_reader((&mut c, first_amt_read)).unwrap(); + assert_eq!(amt_read, 6 + first_amt_read); + assert_eq!(TestDeku::VariantB(0b10), ret_read); +} diff --git a/tests/test_generic.rs b/tests/test_generic.rs index 6151c223..dfb3b8f9 100644 --- a/tests/test_generic.rs +++ b/tests/test_generic.rs @@ -1,19 +1,20 @@ -use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + #[test] fn test_generic_struct() { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] struct TestStruct where - T: deku::DekuWrite + for<'a> deku::DekuRead<'a>, + T: deku::DekuWrite + for<'a> deku::DekuReader<'a>, { field_a: T, } let test_data: Vec = [0x01].to_vec(); - let ret_read = TestStruct::::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::::try_from(test_data.as_slice()).unwrap(); assert_eq!(TestStruct:: { field_a: 0x01 }, ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); @@ -26,7 +27,7 @@ fn test_generic_enum() { #[deku(type = "u8")] enum TestEnum where - T: deku::DekuWrite + for<'a> deku::DekuRead<'a>, + T: deku::DekuWrite + for<'a> deku::DekuReader<'a>, { #[deku(id = "1")] VariantT(T), @@ -34,31 +35,9 @@ fn test_generic_enum() { let test_data: Vec = [0x01, 0x02].to_vec(); - let ret_read = 
TestEnum::::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestEnum::::try_from(test_data.as_slice()).unwrap(); assert_eq!(TestEnum::::VariantT(0x02), ret_read); let ret_write: Vec = ret_read.try_into().unwrap(); assert_eq!(test_data, ret_write); } - -#[test] -fn test_slice_struct() { - #[derive(PartialEq, Debug, DekuRead, DekuWrite)] - struct TestStruct<'a> { - #[deku(count = "2")] - field_a: &'a [u8], - } - - let test_data: Vec = [0x01, 0x02].to_vec(); - - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); - assert_eq!( - TestStruct { - field_a: test_data.as_ref() - }, - ret_read - ); - - let ret_write: Vec = ret_read.try_into().unwrap(); - assert_eq!(test_data, ret_write); -} diff --git a/tests/test_magic.rs b/tests/test_magic.rs index dbf8240f..29d55c84 100644 --- a/tests/test_magic.rs +++ b/tests/test_magic.rs @@ -1,7 +1,8 @@ +use std::convert::{TryFrom, TryInto}; + use deku::prelude::*; use hexlit::hex; use rstest::rstest; -use std::convert::{TryFrom, TryInto}; #[rstest(input, case(&hex!("64656b75")), @@ -25,8 +26,8 @@ fn test_magic_struct(input: &[u8]) { #[derive(PartialEq, Debug, DekuRead, DekuWrite)] #[deku(magic = b"deku")] struct TestStruct {} - - let ret_read = TestStruct::try_from(input).unwrap(); + let input = input.to_vec(); + let ret_read = TestStruct::try_from(input.as_slice()).unwrap(); assert_eq!(TestStruct {}, ret_read); @@ -62,8 +63,9 @@ fn test_magic_enum(input: &[u8]) { #[deku(id = "0")] Variant, } + let input = input.to_vec(); - let ret_read = TestEnum::try_from(input).unwrap(); + let ret_read = TestEnum::try_from(input.as_slice()).unwrap(); assert_eq!(TestEnum::Variant, ret_read); diff --git a/tests/test_regression.rs b/tests/test_regression.rs index 4314414e..28fbcb89 100644 --- a/tests/test_regression.rs +++ b/tests/test_regression.rs @@ -61,7 +61,8 @@ fn issue_224() { }, }; let bytes = packet.to_bytes().unwrap(); - let _packet = Packet::from_bytes((&bytes, 0)).unwrap(); + let mut c = std::io::Cursor::new(bytes); + 
let _packet = Packet::from_reader((&mut c, 0)).unwrap(); } // Extra zeroes added when reading fewer bytes than needed to fill a number @@ -88,8 +89,9 @@ mod issue_282 { // the u32 is stored as three bytes in big-endian order assert_eq!(zero, 0); - let data = &[a, b, c, a, b, c]; - let (_, BitsBytes { bits, bytes }) = BitsBytes::from_bytes((data, 0)).unwrap(); + let data = [a, b, c, a, b, c]; + let (_, BitsBytes { bits, bytes }) = + BitsBytes::from_reader((&mut data.as_slice(), 0)).unwrap(); assert_eq!(bits, expected); assert_eq!(bytes, expected); @@ -113,8 +115,9 @@ mod issue_282 { // the u32 is stored as three bytes in little-endian order assert_eq!(zero, 0); - let data = &[a, b, c, a, b, c]; - let (_, BitsBytes { bits, bytes }) = BitsBytes::from_bytes((data, 0)).unwrap(); + let data = [a, b, c, a, b, c]; + let (_, BitsBytes { bits, bytes }) = + BitsBytes::from_reader((&mut data.as_slice(), 0)).unwrap(); assert_eq!(bits, expected); assert_eq!(bytes, expected); @@ -126,11 +129,11 @@ mod issue_282 { // https://github.com/sharksforarms/deku/issues/292 #[test] fn test_regression_292() { - let test_data: &[u8] = [0x0F, 0xF0].as_ref(); + let test_data = [0x0f, 0xf0]; #[derive(Debug, PartialEq, DekuRead)] #[deku(endian = "little")] - struct Container { + struct Reader { #[deku(bits = 4)] field1: u8, field2: u8, @@ -139,8 +142,10 @@ fn test_regression_292() { } assert_eq!( - Container::from_bytes((test_data, 0)).unwrap().1, - Container { + Reader::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + Reader { field1: 0, field2: 0xff, field3: 0, @@ -149,7 +154,7 @@ fn test_regression_292() { #[derive(Debug, PartialEq, DekuRead)] #[deku(endian = "little")] - struct ContainerBits { + struct ReaderBits { #[deku(bits = 4)] field1: u8, #[deku(bits = 8)] @@ -159,8 +164,10 @@ fn test_regression_292() { } assert_eq!( - ContainerBits::from_bytes((test_data, 0)).unwrap().1, - ContainerBits { + ReaderBits::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + 
ReaderBits { field1: 0, field2: 0xff, field3: 0, @@ -168,7 +175,7 @@ fn test_regression_292() { ); #[derive(Debug, PartialEq, DekuRead)] - struct ContainerByteNoEndian { + struct ReaderByteNoEndian { #[deku(bits = 4)] field1: u8, field2: u8, @@ -177,8 +184,10 @@ fn test_regression_292() { } assert_eq!( - ContainerByteNoEndian::from_bytes((test_data, 0)).unwrap().1, - ContainerByteNoEndian { + ReaderByteNoEndian::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderByteNoEndian { field1: 0, field2: 0xff, field3: 0, @@ -186,7 +195,7 @@ fn test_regression_292() { ); #[derive(Debug, PartialEq, DekuRead)] - struct ContainerBitPadding { + struct ReaderBitPadding { #[deku(pad_bits_before = "4")] field2: u8, #[deku(bits = 4)] @@ -194,15 +203,17 @@ fn test_regression_292() { } assert_eq!( - ContainerBitPadding::from_bytes((test_data, 0)).unwrap().1, - ContainerBitPadding { + ReaderBitPadding::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderBitPadding { field2: 0xff, field3: 0, } ); #[derive(Debug, PartialEq, DekuRead)] - struct ContainerBitPadding1 { + struct ReaderBitPadding1 { #[deku(bits = 2)] field1: u8, #[deku(pad_bits_before = "2")] @@ -212,19 +223,21 @@ fn test_regression_292() { } assert_eq!( - ContainerBitPadding1::from_bytes((test_data, 0)).unwrap().1, - ContainerBitPadding1 { + ReaderBitPadding1::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderBitPadding1 { field1: 0, field2: 0xff, field3: 0, } ); - let test_data: &[u8] = [0b11000000, 0b00111111].as_ref(); + let test_data = [0b11000000, 0b00111111]; #[derive(Debug, PartialEq, DekuRead)] #[deku(endian = "little")] - struct ContainerTwo { + struct ReaderTwo { #[deku(bits = 2)] field1: u8, field2: u8, @@ -233,19 +246,21 @@ fn test_regression_292() { } assert_eq!( - ContainerTwo::from_bytes((test_data, 0)).unwrap().1, - ContainerTwo { + ReaderTwo::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderTwo { field1: 0b11, field2: 0, field3: 
0b111111, } ); - let test_data: &[u8] = [0b11000000, 0b00000000, 0b00111111].as_ref(); + let test_data = [0b11000000, 0b00000000, 0b00111111]; #[derive(Debug, PartialEq, DekuRead)] #[deku(endian = "little")] - struct ContainerU16 { + struct ReaderU16Le { #[deku(bits = 2)] field1: u8, field2: u16, @@ -254,13 +269,84 @@ fn test_regression_292() { } assert_eq!( - ContainerU16::from_bytes((test_data, 0)).unwrap().1, - ContainerU16 { + ReaderU16Le::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderU16Le { field1: 0b11, field2: 0, field3: 0b111111, } ); + + let test_data = [0b11000000, 0b00000000, 0b00111111]; + + #[derive(Debug, PartialEq, DekuRead)] + #[deku(endian = "big")] + struct ReaderU16Be { + #[deku(bits = 2)] + field1: u8, + field2: u16, + #[deku(bits = 6)] + field3: u8, + } + + assert_eq!( + ReaderU16Be::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderU16Be { + field1: 0b11, + field2: 0, + field3: 0b111111, + } + ); + + let test_data = [0b11000000, 0b00000000, 0b01100001]; + + #[derive(Debug, PartialEq, DekuRead)] + #[deku(endian = "big")] + struct ReaderI16Le { + #[deku(bits = 2)] + field1: i8, + field2: i16, + #[deku(bits = 6)] + field3: i8, + } + + assert_eq!( + ReaderI16Le::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderI16Le { + field1: -0b01, + field2: 1, + field3: -0b011111, + } + ); + + let test_data = [0b11000000, 0b00000000, 0b01100001]; + + #[derive(Debug, PartialEq, DekuRead)] + #[deku(endian = "big")] + struct ReaderI16Be { + #[deku(bits = 2)] + field1: i8, + field2: i16, + #[deku(bits = 6)] + field3: i8, + } + + assert_eq!( + ReaderI16Be::from_reader((&mut test_data.as_slice(), 0)) + .unwrap() + .1, + ReaderI16Be { + field1: -0b01, + field2: 1, + field3: -0b011111, + } + ); } #[test] diff --git a/tests/test_struct.rs b/tests/test_struct.rs index b239ca43..063bf5a0 100644 --- a/tests/test_struct.rs +++ b/tests/test_struct.rs @@ -1,8 +1,9 @@ #![allow(clippy::unusual_byte_groupings)] 
-use deku::prelude::*; use std::convert::{TryFrom, TryInto}; +use deku::prelude::*; + mod test_common; /// General smoke tests for structs @@ -34,7 +35,7 @@ fn test_read_too_much_data() { pub field_a: u8, } - let test_data = [0u8; 100].as_ref(); + let test_data: &[u8] = &[0u8; 100]; TestStruct::try_from(test_data).unwrap(); } @@ -53,39 +54,39 @@ fn test_unnamed_struct() { ); let test_data: Vec = [ - 0xFF, + 0xff, 0b1001_0110, - 0xAA, - 0xBB, - 0xCC, - 0xDD, + 0xaa, + 0xbb, + 0xcc, + 0xdd, 0b1001_0110, - 0xCC, - 0xDD, + 0xcc, + 0xdd, 0x02, - 0xBE, - 0xEF, + 0xbe, + 0xef, ] .to_vec(); // Read - let ret_read = TestUnamedStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestUnamedStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestUnamedStruct( - 0xFF, + 0xff, 0b0000_0010, 0b0001_0110, - native_endian!(0xBBAAu16), - 0xCCDDu16, + native_endian!(0xbbaau16), + 0xccddu16, NestedDeku { nest_a: 0b00_100101, nest_b: 0b10, inner: DoubleNestedDeku { - data: native_endian!(0xDDCCu16) + data: native_endian!(0xddccu16) } }, 0x02, - vec![0xBE, 0xEF], + vec![0xbe, 0xef], ), ret_read ); @@ -117,41 +118,41 @@ fn test_named_struct() { } let test_data: Vec = [ - 0xFF, + 0xff, 0b1001_0110, - 0xAA, - 0xBB, - 0xCC, - 0xDD, + 0xaa, + 0xbb, + 0xcc, + 0xdd, 0b1001_0110, - 0xCC, - 0xDD, + 0xcc, + 0xdd, 0x02, - 0xBE, - 0xEF, - 0xFF, + 0xbe, + 0xef, + 0xff, ] .to_vec(); // Read - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); assert_eq!( TestStruct { - field_a: 0xFF, + field_a: 0xff, field_b: 0b0000_0010, field_c: 0b0001_0110, - field_d: native_endian!(0xBBAAu16), - field_e: 0xCCDDu16, + field_d: native_endian!(0xbbaau16), + field_e: 0xccddu16, field_f: NestedDeku { nest_a: 0b00_100101, nest_b: 0b10, inner: DoubleNestedDeku { - data: native_endian!(0xDDCCu16) + data: native_endian!(0xddccu16) } }, vec_len: 0x02, - vec_data: vec![0xBE, 0xEF], - rest: 0xFF + vec_data: vec![0xbe, 
0xef], + rest: 0xff }, ret_read ); @@ -168,11 +169,11 @@ fn test_raw_identifiers_struct() { pub r#type: u8, } - let test_data: Vec = [0xFF].to_vec(); + let test_data: Vec = [0xff].to_vec(); // Read - let ret_read = TestStruct::try_from(test_data.as_ref()).unwrap(); - assert_eq!(TestStruct { r#type: 0xFF }, ret_read); + let ret_read = TestStruct::try_from(test_data.as_slice()).unwrap(); + assert_eq!(TestStruct { r#type: 0xff }, ret_read); // Write let ret_write: Vec = ret_read.try_into().unwrap(); diff --git a/tests/test_tuple.rs b/tests/test_tuple.rs new file mode 100644 index 00000000..be955296 --- /dev/null +++ b/tests/test_tuple.rs @@ -0,0 +1,24 @@ +use std::convert::{TryFrom, TryInto}; + +use deku::prelude::*; +use hexlit::hex; +use rstest::*; + +#[derive(PartialEq, Debug, DekuRead, DekuWrite)] +#[deku(type = "u8")] +enum TestEnum { + #[deku(id = "1")] + VarA((u8, u16)), +} + +#[rstest(input,expected, + case(&mut hex!("01ABFFAA"), TestEnum::VarA((0xAB, 0xAAFF))), +)] +fn test_enum(input: &mut [u8], expected: TestEnum) { + let input = input.to_vec(); + let ret_read = TestEnum::try_from(input.as_slice()).unwrap(); + assert_eq!(expected, ret_read); + + let ret_write: Vec = ret_read.try_into().unwrap(); + assert_eq!(input, ret_write); +}