Skip to content

Commit

Permalink
Add bits feature to deku-derive
Browse files Browse the repository at this point in the history
* Use the `bits` feature to correctly disallow bit-related attributes from
  being used
  • Loading branch information
wcampbell0x2a committed Sep 4, 2024
1 parent cb80720 commit 32b6779
Show file tree
Hide file tree
Showing 29 changed files with 275 additions and 31 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ jobs:
- run: cargo test --all
# run examples
- run: cargo run --example 2>&1 | grep -P ' ' | awk '{print $1}' | xargs -i cargo run --example {}
# test with no bits feature (don't test docs)
- run: cargo test --lib --examples --tests --features std --no-default-features

# Only build on MSRV, since trybuild will fail on older version
build-msrv:
Expand Down
28 changes: 28 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,31 @@ workspace = true
# Triggers in macro generated code of darling
# /~https://github.com/rust-lang/rust-clippy/issues/12643
manual-unwrap-or-default = "allow"

# Example targets. Examples that exercise bit-level (sub-byte) parsing are
# gated behind the "bits" feature via `required-features`, so that
# `cargo test --no-default-features` (and builds without "bits") skips them
# instead of failing to compile.

[[example]]
name = "custom_reader_and_writer"
required-features = ["bits"]

[[example]]
name = "deku_input"

[[example]]
name = "enums_catch_all"
required-features = ["bits"]

[[example]]
name = "enums"

[[example]]
name = "example"
required-features = ["bits"]

[[example]]
name = "ipv4"
required-features = ["bits"]

[[example]]
name = "many"

[[example]]
name = "read_all"
43 changes: 40 additions & 3 deletions deku-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ struct DekuData {
id_type: Option<TokenStream>,

/// enum only: bit size of the enum `id`
#[cfg(feature = "bits")]
bits: Option<Num>,

/// enum only: byte size of the enum `id`
Expand Down Expand Up @@ -198,6 +199,7 @@ impl DekuData {
magic: receiver.magic,
id: receiver.id,
id_type: receiver.id_type?,
#[cfg(feature = "bits")]
bits: receiver.bits,
bytes: receiver.bytes,
seek_rewind: receiver.seek_rewind,
Expand All @@ -224,7 +226,7 @@ impl DekuData {
match data.data {
ast::Data::Struct(_) => {
// Validate id_* attributes are being used on an enum
if data.id_type.is_some() {
let ret = if data.id_type.is_some() {
Err(cerror(
data.id_type.span(),
"`id_type` only supported on enum",
Expand All @@ -233,11 +235,16 @@ impl DekuData {
Err(cerror(data.id.span(), "`id` only supported on enum"))
} else if data.bytes.is_some() {
Err(cerror(data.bytes.span(), "`bytes` only supported on enum"))
} else if data.bits.is_some() {
Err(cerror(data.bits.span(), "`bits` only supported on enum"))
} else {
Ok(())
};

#[cfg(feature = "bits")]
if ret.is_ok() && data.bits.is_some() {
return Err(cerror(data.bits.span(), "`bits` only supported on enum"));
}

ret
}
ast::Data::Enum(_) => {
// Validate `id_type` or `id` is specified
Expand All @@ -257,6 +264,7 @@ impl DekuData {
}

// Validate `id_*` used correctly
#[cfg(feature = "bits")]
if data.id.is_some() && data.bits.is_some() {
return Err(cerror(
data.ident.span(),
Expand All @@ -271,6 +279,7 @@ impl DekuData {
}

// Validate either `bits` or `bytes` is specified
#[cfg(feature = "bits")]
if data.bits.is_some() && data.bytes.is_some() {
return Err(cerror(
data.bits.span(),
Expand Down Expand Up @@ -336,7 +345,10 @@ impl<'a> TryFrom<&'a DekuData> for DekuDataEnum<'a> {

let id_args = crate::macros::gen_id_args(
deku_data.endian.as_ref(),
#[cfg(feature = "bits")]
deku_data.bits.as_ref(),
#[cfg(not(feature = "bits"))]
None,
deku_data.bytes.as_ref(),
)?;

Expand Down Expand Up @@ -393,6 +405,7 @@ struct FieldData {
endian: Option<syn::LitStr>,

/// field bit size
#[cfg(feature = "bits")]
bits: Option<Num>,

/// field byte size
Expand All @@ -402,6 +415,7 @@ struct FieldData {
count: Option<TokenStream>,

/// tokens providing the number of bits for the length of the container
#[cfg(feature = "bits")]
bits_read: Option<TokenStream>,

/// tokens providing the number of bytes for the length of the container
Expand Down Expand Up @@ -432,12 +446,14 @@ struct FieldData {
skip: bool,

/// pad a number of bits before
#[cfg(feature = "bits")]
pad_bits_before: Option<TokenStream>,

/// pad a number of bytes before
pad_bytes_before: Option<TokenStream>,

/// pad a number of bits after
#[cfg(feature = "bits")]
pad_bits_after: Option<TokenStream>,

/// pad a number of bytes after
Expand Down Expand Up @@ -486,9 +502,11 @@ impl FieldData {
ident: receiver.ident,
ty: receiver.ty,
endian: receiver.endian,
#[cfg(feature = "bits")]
bits: receiver.bits,
bytes: receiver.bytes,
count: receiver.count?,
#[cfg(feature = "bits")]
bits_read: receiver.bits_read?,
bytes_read: receiver.bytes_read?,
until: receiver.until?,
Expand All @@ -499,8 +517,10 @@ impl FieldData {
reader: receiver.reader?,
writer: receiver.writer?,
skip: receiver.skip,
#[cfg(feature = "bits")]
pad_bits_before: receiver.pad_bits_before?,
pad_bytes_before: receiver.pad_bytes_before?,
#[cfg(feature = "bits")]
pad_bits_after: receiver.pad_bits_after?,
pad_bytes_after: receiver.pad_bytes_after?,
temp: receiver.temp,
Expand All @@ -524,6 +544,7 @@ impl FieldData {

fn validate(data: &FieldData) -> Result<(), TokenStream> {
// Validate either `read_bytes` or `read_bits` is specified
#[cfg(feature = "bits")]
if data.bits_read.is_some() && data.bytes_read.is_some() {
return Err(cerror(
data.bits_read.span(),
Expand All @@ -532,6 +553,7 @@ impl FieldData {
}

// Validate either `count` or `bits_read`/`bytes_read` is specified
#[cfg(feature = "bits")]
if data.count.is_some() && (data.bits_read.is_some() || data.bytes_read.is_some()) {
if data.bits_read.is_some() {
return Err(cerror(
Expand All @@ -546,7 +568,16 @@ impl FieldData {
}
}

#[cfg(not(feature = "bits"))]
if data.count.is_some() && data.bytes_read.is_some() {
return Err(cerror(
data.count.span(),
"conflicting: both `count` and `bytes_read` specified on field",
));
}

// Validate either `bits` or `bytes` is specified
#[cfg(feature = "bits")]
if data.bits.is_some() && data.bytes.is_some() {
// FIXME: Use `Span::join` once out of nightly
return Err(cerror(
Expand All @@ -565,6 +596,7 @@ impl FieldData {
}

// Validate usage of read_all
#[cfg(feature = "bits")]
if data.read_all
&& (data.until.is_some()
|| data.count.is_some()
Expand Down Expand Up @@ -707,6 +739,7 @@ struct DekuReceiver {
id_type: Result<Option<TokenStream>, ReplacementError>,

/// enum only: bit size of the enum `id`
#[cfg(feature = "bits")]
#[darling(default)]
bits: Option<Num>,

Expand Down Expand Up @@ -816,6 +849,7 @@ struct DekuFieldReceiver {
endian: Option<syn::LitStr>,

/// field bit size
#[cfg(feature = "bits")]
#[darling(default)]
bits: Option<Num>,

Expand All @@ -828,6 +862,7 @@ struct DekuFieldReceiver {
count: Result<Option<TokenStream>, ReplacementError>,

/// tokens providing the number of bits for the length of the container
#[cfg(feature = "bits")]
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
bits_read: Result<Option<TokenStream>, ReplacementError>,

Expand Down Expand Up @@ -871,6 +906,7 @@ struct DekuFieldReceiver {
skip: bool,

/// pad a number of bits before
#[cfg(feature = "bits")]
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
pad_bits_before: Result<Option<TokenStream>, ReplacementError>,

Expand All @@ -879,6 +915,7 @@ struct DekuFieldReceiver {
pad_bytes_before: Result<Option<TokenStream>, ReplacementError>,

/// pad a number of bits after
#[cfg(feature = "bits")]
#[darling(default = "default_res_opt", map = "map_litstr_as_tokenstream")]
pad_bits_after: Result<Option<TokenStream>, ReplacementError>,

Expand Down
84 changes: 64 additions & 20 deletions deku-derive/src/macros/deku_read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use quote::quote;

use crate::macros::{
gen_ctx_types_and_arg, gen_field_args, gen_internal_field_ident, gen_internal_field_idents,
gen_type_from_ctx_id, pad_bits, token_contains_string, wrap_default_ctx,
gen_type_from_ctx_id, token_contains_string, wrap_default_ctx,
};
use crate::{DekuData, DekuDataEnum, DekuDataStruct, FieldData, Id};

Expand Down Expand Up @@ -558,6 +558,7 @@ fn emit_bit_byte_offsets(
(bit_offset, byte_offset)
}

#[cfg(feature = "bits")]
fn emit_padding(bit_size: &TokenStream) -> TokenStream {
let crate_ = super::get_crate_name();
quote! {
Expand Down Expand Up @@ -585,6 +586,29 @@ fn emit_padding(bit_size: &TokenStream) -> TokenStream {
}
}

// TODO: if this is a simple calculation such as "8 + 2", this could be const
/// Emit the padding-read code used when the "bits" feature is disabled.
///
/// Generates a block that converts `#bit_size` (here a *byte* count, since
/// sub-byte padding is unavailable without "bits") to `usize` and consumes
/// that many bytes from `__deku_reader` into a scratch buffer, discarding
/// them. On a failed conversion the generated code returns
/// `DekuError::InvalidParam`.
#[cfg(not(feature = "bits"))]
fn emit_padding_bytes(bit_size: &TokenStream) -> TokenStream {
    let crate_ = super::get_crate_name();
    quote! {
        {
            use core::convert::TryFrom;
            extern crate alloc;
            use alloc::borrow::Cow;
            // `|_|`: the conversion error itself is not reported; the message
            // is built from the stringified expression instead. Binding it to
            // a name would trigger `unused_variables` in consumer crates.
            let __deku_pad = usize::try_from(#bit_size).map_err(|_|
                ::#crate_::DekuError::InvalidParam(Cow::from(format!(
                    "Invalid padding param \"({})\": cannot convert to usize",
                    stringify!(#bit_size)
                )))
            )?;

            let mut buf = vec![0; __deku_pad];
            let _ = __deku_reader.read_bytes(__deku_pad, &mut buf)?;
        }
    }
}

fn emit_field_read(
input: &DekuData,
i: usize,
Expand All @@ -602,6 +626,7 @@ fn emit_field_read(
// fields to check usage of bit/byte offset
let field_check_vars = [
&f.count,
#[cfg(feature = "bits")]
&f.bits_read,
&f.bytes_read,
&f.until,
Expand Down Expand Up @@ -705,7 +730,10 @@ fn emit_field_read(
} else {
let read_args = gen_field_args(
field_endian,
#[cfg(feature = "bits")]
f.bits.as_ref(),
#[cfg(not(feature = "bits"))]
None,
f.bytes.as_ref(),
f.ctx.as_ref(),
)?;
Expand Down Expand Up @@ -751,17 +779,6 @@ fn emit_field_read(
)?
}
}
} else if let Some(field_bits) = &f.bits_read {
quote! {
{
use core::borrow::Borrow;
#type_as_deku_read::from_reader_with_ctx
(
__deku_reader,
(::#crate_::ctx::Limit::new_bit_size(::#crate_::ctx::BitSize(usize::try_from(*((#field_bits).borrow()))?)), (#read_args))
)?
}
}
} else if let Some(field_bytes) = &f.bytes_read {
quote! {
{
Expand Down Expand Up @@ -795,27 +812,54 @@ fn emit_field_read(
}
}
} else {
quote! {
#type_as_deku_read::from_reader_with_ctx
(
__deku_reader,
(#read_args)
)?
let mut ret = quote! {};

#[cfg(feature = "bits")]
if let Some(field_bits) = &f.bits_read {
ret.extend(quote! {
{
use core::borrow::Borrow;
#type_as_deku_read::from_reader_with_ctx
(
__deku_reader,
(::#crate_::ctx::Limit::new_bit_size(::#crate_::ctx::BitSize(usize::try_from(*((#field_bits).borrow()))?)), (#read_args))
)?
}
})
}
if ret.is_empty() {
ret.extend(quote! {
#type_as_deku_read::from_reader_with_ctx
(
__deku_reader,
(#read_args)
)?
})
}

ret
}
};

let pad_bits_before = pad_bits(
#[cfg(feature = "bits")]
let pad_bits_before = crate::macros::pad_bits(
f.pad_bits_before.as_ref(),
f.pad_bytes_before.as_ref(),
emit_padding,
);
let pad_bits_after = pad_bits(
#[cfg(feature = "bits")]
let pad_bits_after = crate::macros::pad_bits(
f.pad_bits_after.as_ref(),
f.pad_bytes_after.as_ref(),
emit_padding,
);

#[cfg(not(feature = "bits"))]
let pad_bits_before = crate::macros::pad_bytes(f.pad_bytes_before.as_ref(), emit_padding_bytes);

#[cfg(not(feature = "bits"))]
let pad_bits_after = crate::macros::pad_bytes(f.pad_bytes_after.as_ref(), emit_padding_bytes);

let field_read_normal = quote! {
let __deku_value = #field_read_func;
let __deku_value: #field_type = #field_map(__deku_value)?;
Expand Down
Loading

0 comments on commit 32b6779

Please sign in to comment.