WIP: remove more quotations
wcampbell0x2a committed Dec 31, 2023
1 parent 26c00d3 commit 1b3e371
Showing 4 changed files with 191 additions and 31 deletions.
208 changes: 184 additions & 24 deletions deku-derive/src/lib.rs
@@ -12,6 +12,7 @@ use proc_macro2::TokenStream;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use syn::{parse_quote, Expr, Lit, LitStr};

use crate::macros::deku_read::emit_deku_read;
use crate::macros::deku_write::emit_deku_write;
@@ -640,7 +641,7 @@ struct DekuReceiver {
id: Option<Id>,

/// enum only: type of the enum `id`
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
id_type: Result<Option<TokenStream>, ReplacementError>,

/// enum only: bit size of the enum `id`
@@ -671,7 +672,11 @@ fn apply_replacements(input: &syn::LitStr) -> Result<Cow<'_, syn::LitStr>, Repla
.replace("deku::reader", "__deku_reader")
.replace("deku::writer", "__deku_writer")
.replace("deku::bit_offset", "__deku_bit_offset")
.replace("deku::byte_offset", "__deku_byte_offset");
.replace("deku::byte_offset", "__deku_byte_offset")
.replace("deku :: reader", "__deku_reader")
.replace("deku :: writer", "__deku_writer")
.replace("deku :: bit_offset", "__deku_bit_offset")
.replace("deku :: byte_offset", "__deku_byte_offset");

Ok(Cow::Owned(syn::LitStr::new(&input_str, input.span())))
}
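
A note on the space-separated variants added above: once attribute values arrive as parsed syn::Expr nodes and are rendered back to text with to_token_stream().to_string(), proc-macro2 prints path separators with spaces around them, so deku::reader comes out as "deku :: reader", and the extra .replace calls cover that spelling. A minimal standalone sketch of this behavior (assuming syn with the "full" feature and quote are available as dependencies):

use quote::ToTokens;

fn main() {
    // Round-tripping an expression through a token stream inserts spaces
    // around `::`, which is why apply_replacements now matches both spellings.
    let expr: syn::Expr = syn::parse_str("deku::reader").unwrap();
    assert_eq!(expr.to_token_stream().to_string(), "deku :: reader");
}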
@@ -686,7 +691,7 @@ fn map_option_litstr(input: Option<syn::LitStr>) -> Result<Option<syn::LitStr>,

/// Parse a TokenStream from an Option<LitStr>
/// Also replaces any namespaced variables to internal variables found in `input`
fn map_litstr_as_tokenstream(
fn map_litstr_as_tokenstream_id_pat(
input: Option<syn::LitStr>,
) -> Result<Option<TokenStream>, ReplacementError> {
Ok(match input {
@@ -701,6 +706,161 @@
})
}

/// Parse a TokenStream from an Option<syn::Expr>
/// Also replaces any namespaced variables to internal variables found in `input`
fn map_litstr_as_tokenstream(
input: Option<syn::Expr>,
) -> Result<Option<TokenStream>, ReplacementError> {
let r = match input {
Some(Expr::Call(expr_call)) => {
let span = expr_call.span();
let value = expr_call.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Path(expr_path)) => {
let span = expr_path.span();
let value = expr_path.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Lit(expr_lit)) => match expr_lit.lit {
Lit::Str(s) => {
let v = apply_replacements(&s)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
// TODO: preserve hex!
Lit::Int(i) => {
let digit = i.base10_digits();
let span = i.span();
let lit = LitStr::new(&digit, span);
let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
_ => todo!("not a Lit::Str or Lit::Int: {:?}", expr_lit.lit),
},
Some(Expr::Binary(expr_binary)) => {
let span = expr_binary.span();
let value = expr_binary.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::MethodCall(expr_method)) => {
let span = expr_method.span();
let value = expr_method.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Paren(expr_paren)) => {
let span = expr_paren.span();
let value = expr_paren.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Closure(expr_closure)) => {
let span = expr_closure.span();
let value = expr_closure.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Range(expr_range)) => {
let span = expr_range.span();
let value = expr_range.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Unary(expr_unary)) => {
let span = expr_unary.span();
let value = expr_unary.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Cast(expr_cast)) => {
let span = expr_cast.span();
let value = expr_cast.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Infer(expr_infer)) => {
let span = expr_infer.span();
let value = expr_infer.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(Expr::Repeat(expr_repeat)) => {
let span = expr_repeat.span();
let value = expr_repeat.to_token_stream().to_string();
let lit = LitStr::new(&value, span);

let v = apply_replacements(&lit)?;
let v = v
.parse::<TokenStream>()
.expect("could not parse token stream");
Some(v)
}
Some(_) => todo!("{:?}", input),
None => None,
};

return Ok(r);
}

/// Generate field name which supports both un-named/named structs/enums
/// `ident` is Some if the container has named fields
/// `index` is the numerical index of the current field used in un-named containers
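
For orientation, the match arms in the new map_litstr_as_tokenstream above correspond to the shapes an attribute value can take once darling hands it over as a parsed syn::Expr rather than a quoted string. A standalone sketch of that dispatch (assuming syn with the "full" feature; variant_name is a throwaway helper introduced here, and the binary/closure inputs are illustrative values of the kind cond/until-style attributes might take, not taken from this commit):

use syn::Expr;

fn variant_name(expr: &Expr) -> &'static str {
    match expr {
        Expr::Path(_) => "Expr::Path",       // e.g. `num_items`
        Expr::Lit(_) => "Expr::Lit",         // e.g. `1`
        Expr::Call(_) => "Expr::Call",       // e.g. `bit_flipper_read(*field_a, deku::reader, BitSize(8))`
        Expr::Binary(_) => "Expr::Binary",   // e.g. `field_a + 2`
        Expr::Closure(_) => "Expr::Closure", // e.g. `|v: &u8| *v == 0`
        _ => "other (Paren, MethodCall, Range, Unary, Cast, ...)",
    }
}

fn main() {
    for src in [
        "num_items",
        "1",
        "bit_flipper_read(*field_a, deku::reader, BitSize(8))",
        "field_a + 2",
        "|v: &u8| *v == 0",
    ] {
        let expr = syn::parse_str::<Expr>(src).unwrap();
        println!("{src} -> {}", variant_name(&expr));
    }
}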
Expand Down Expand Up @@ -745,86 +905,86 @@ struct DekuFieldReceiver {
bytes: Option<Num>,

/// tokens providing the length of the container
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
count: Result<Option<TokenStream>, ReplacementError>,

/// tokens providing the number of bits for the length of the container
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
bits_read: Result<Option<TokenStream>, ReplacementError>,

/// tokens providing the number of bytes for the length of the container
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
bytes_read: Result<Option<TokenStream>, ReplacementError>,

/// a predicate to decide when to stop reading elements into the container
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
until: Result<Option<TokenStream>, ReplacementError>,

/// apply a function to the field after it's read
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
map: Result<Option<TokenStream>, ReplacementError>,

/// context passed to the field.
/// A comma separated argument list.
// TODO: The type of it should be `Punctuated<Expr, Comma>`
// https://github.com/TedDriggs/darling/pull/98
#[darling(default = "default_res_opt", map = "map_option_litstr")]
#[darling(default = "default_res_opt", map = map_option_litstr)]
ctx: Result<Option<syn::LitStr>, ReplacementError>,

/// map field when updating struct
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
update: Result<Option<TokenStream>, ReplacementError>,

/// custom field reader code
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
reader: Result<Option<TokenStream>, ReplacementError>,

/// custom field writer code
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
writer: Result<Option<TokenStream>, ReplacementError>,

/// skip field reading/writing
#[darling(default)]
skip: bool,

/// pad a number of bits before
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
pad_bits_before: Result<Option<TokenStream>, ReplacementError>,

/// pad a number of bytes before
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
pad_bytes_before: Result<Option<TokenStream>, ReplacementError>,

/// pad a number of bits after
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
pad_bits_after: Result<Option<TokenStream>, ReplacementError>,

/// pad a number of bytes after
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
pad_bytes_after: Result<Option<TokenStream>, ReplacementError>,

/// read field as temporary value, isn't stored
#[darling(default)]
temp: bool,

/// write given value of temp field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream_id_pat)]
temp_value: Result<Option<TokenStream>, ReplacementError>,

/// default value code when used with skip
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
default: Result<Option<TokenStream>, ReplacementError>,

/// condition to parse field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
cond: Result<Option<TokenStream>, ReplacementError>,

// assertion on field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
assert: Result<Option<TokenStream>, ReplacementError>,

// assert value of field
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
assert_eq: Result<Option<TokenStream>, ReplacementError>,
}

@@ -837,19 +997,19 @@ struct DekuVariantReceiver {
discriminant: Option<syn::Expr>,

/// custom variant reader code
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
reader: Result<Option<TokenStream>, ReplacementError>,

/// custom variant reader code
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream)]
writer: Result<Option<TokenStream>, ReplacementError>,

/// variant `id` value
#[darling(default)]
id: Option<Id>,

/// variant `id_pat` value
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
#[darling(default = "default_res_opt", map = map_litstr_as_tokenstream_id_pat)]
id_pat: Result<Option<TokenStream>, ReplacementError>,

/// variant `id` value
4 changes: 2 additions & 2 deletions examples/custom_reader_and_writer.rs
@@ -51,8 +51,8 @@ struct DekuTest {
field_a: u8,

#[deku(
reader = "bit_flipper_read(*field_a, deku::reader, BitSize(8))",
writer = "bit_flipper_write(*field_a, *field_b, deku::writer, BitSize(8))"
reader = bit_flipper_read(*field_a, deku::reader, BitSize(8)),
writer = bit_flipper_write(*field_a, *field_b, deku::writer, BitSize(8)),
)]
field_b: u8,
}
8 changes: 4 additions & 4 deletions examples/enums_catch_all.rs
@@ -4,18 +4,18 @@ use deku::prelude::*;
use hexlit::hex;

#[derive(Clone, Copy, PartialEq, Eq, Debug, DekuWrite, DekuRead)]
#[deku(id_type = "u8")]
#[deku(id_type = u8)]
#[non_exhaustive]
#[repr(u8)]
pub enum DekuTest {
/// A
#[deku(id = "1")]
#[deku(id = 1)]
A = 0,
/// B
#[deku(id = "2")]
#[deku(id = 2)]
B = 1,
/// C
#[deku(id = "3", default)]
#[deku(id = 3, default)]
C = 2,
}

2 changes: 1 addition & 1 deletion examples/example.rs
@@ -36,7 +36,7 @@ struct DekuTest {
field_e: u8,
field_f: FieldF,
num_items: u8,
#[deku(count = "num_items", endian = "big")]
#[deku(count = num_items, endian = "big")]
items: Vec<u16>,
}

