fix: Convert BitSize to an enum with two variants, Bits and Bytes #138

Merged 1 commit on Nov 26, 2020
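This change replaces the `BitSize(usize)` context struct with a `Size` enum that has `Bits(usize)` and `Bytes(usize)` variants, and carries the `bytes` attribute through the derive macros instead of converting it to bits up front. As a rough sketch of the attribute surface this touches (the struct and field names below are hypothetical, not from this PR), a field-level `bits` attribute now maps to `Size::Bits` and `bytes` to `Size::Bytes` in the generated read/write context:

```rust
use deku::prelude::*;

// Illustrative only: the bits/bytes field attributes whose expansion
// changes in this PR.
#[derive(Debug, PartialEq, DekuRead, DekuWrite)]
struct Example {
    // Expands to a `deku::ctx::Size::Bits(4)` context argument.
    #[deku(bits = "4")]
    flags: u8,

    // Previously rewritten to a bit count at derive time; with this PR it
    // is passed through as `deku::ctx::Size::Bytes(2)`.
    #[deku(bytes = "2", endian = "big")]
    length: u16,
}
```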
17 changes: 10 additions & 7 deletions deku-derive/src/lib.rs
@@ -39,8 +39,10 @@ struct DekuData {
id_type: Option<syn::Ident>,

/// enum only: bit size of the enum `id`
/// `bytes` is converted to `bits` if provided
bits: Option<usize>,

/// enum only: byte size of the enum `id`
bytes: Option<usize>,
}

impl DekuData {
@@ -80,8 +82,6 @@ impl DekuData {
.transpose()
.map_err(|e| e.to_compile_error())?;

let bits = receiver.bytes.map(|b| b * 8).or(receiver.bits);

Ok(Self {
vis: receiver.vis,
ident: receiver.ident,
@@ -93,7 +93,8 @@
magic: receiver.magic,
id: receiver.id,
id_type: receiver.id_type,
bits,
bits: receiver.bits,
bytes: receiver.bytes,
})
}

@@ -198,6 +199,9 @@ struct FieldData {
/// field bit size
bits: Option<usize>,

/// field byte size
bytes: Option<usize>,

/// tokens providing the length of the container
count: Option<TokenStream>,

@@ -240,8 +244,6 @@ impl FieldData {
FieldData::validate(&receiver)
.map_err(|(span, msg)| syn::Error::new(span, msg).to_compile_error())?;

let bits = receiver.bytes.map(|b| b * 8).or(receiver.bits);

let bits_read = receiver
.bytes_read
.map(|tokens| quote! { (#tokens) * 8 })
@@ -259,7 +261,8 @@
ident: receiver.ident,
ty: receiver.ty,
endian: receiver.endian,
bits,
bits: receiver.bits,
bytes: receiver.bytes,
count: receiver.count,
bits_read,
until: receiver.until,
6 changes: 3 additions & 3 deletions deku-derive/src/macros/deku_read.rs
@@ -129,7 +129,7 @@ fn emit_enum(input: &DekuData) -> Result<TokenStream, syn::Error> {
let id = input.id.as_ref();
let id_type = input.id_type.as_ref();

let id_args = gen_id_args(input.endian.as_ref(), input.bits)?;
let id_args = gen_id_args(input.endian.as_ref(), input.bits, input.bytes)?;

let magic_read = emit_magic_read(input)?;

@@ -368,7 +368,7 @@ fn emit_field_read(
let field_read_func = if field_reader.is_some() {
quote! { #field_reader }
} else {
let read_args = gen_field_args(field_endian, f.bits, f.ctx.as_ref())?;
let read_args = gen_field_args(field_endian, f.bits, f.bytes, f.ctx.as_ref())?;

// The container limiting options are special, we need to generate `(limit, (other, ..))` for them.
// These have a problem where when it isn't a copy type, the field will be moved.
@@ -388,7 +388,7 @@
quote! {
{
use core::borrow::Borrow;
DekuRead::read(rest, (deku::ctx::Limit::new_bits(deku::ctx::BitSize(usize::try_from(*((#field_bits).borrow()))?)), (#read_args)))
DekuRead::read(rest, (deku::ctx::Limit::new_bits(deku::ctx::Size::Bits(usize::try_from(*((#field_bits).borrow()))?)), (#read_args)))
}
}
} else if let Some(field_until) = &f.until {
4 changes: 2 additions & 2 deletions deku-derive/src/macros/deku_write.rs
@@ -156,7 +156,7 @@ fn emit_enum(input: &DekuData) -> Result<TokenStream, syn::Error> {
let id = input.id.as_ref();
let id_type = input.id_type.as_ref();

let id_args = gen_id_args(input.endian.as_ref(), input.bits)?;
let id_args = gen_id_args(input.endian.as_ref(), input.bits, input.bytes)?;

let mut variant_writes = vec![];
let mut variant_updates = vec![];
@@ -417,7 +417,7 @@ fn emit_field_write(
let field_write_func = if field_writer.is_some() {
quote! { #field_writer }
} else {
let write_args = gen_field_args(field_endian, f.bits, f.ctx.as_ref())?;
let write_args = gen_field_args(field_endian, f.bits, f.bytes, f.ctx.as_ref())?;

quote! { #object_prefix #field_ident.write(output, (#write_args)) }
};
21 changes: 14 additions & 7 deletions deku-derive/src/macros/mod.rs
@@ -160,13 +160,18 @@ fn gen_ctx_types_and_arg(
}

/// Generate argument for `id`:
/// `#deku(endian = "big", bits = "1")` -> `Endian::Big, BitSize(1)`
fn gen_id_args(endian: Option<&syn::LitStr>, bits: Option<usize>) -> syn::Result<TokenStream> {
/// `#deku(endian = "big", bits = "1")` -> `Endian::Big, Size::Bits(1)`
fn gen_id_args(
endian: Option<&syn::LitStr>,
bits: Option<usize>,
bytes: Option<usize>,
) -> syn::Result<TokenStream> {
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {deku::ctx::BitSize(#n)});
let bits = bits.map(|n| quote! {deku::ctx::Size::Bits(#n)});
let bytes = bytes.map(|n| quote! {deku::ctx::Size::Bytes(#n)});

// FIXME: Should be `into_iter` here, see /~https://github.com/rust-lang/rust/issues/66145.
let id_args = [endian.as_ref(), bits.as_ref()]
let id_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
@@ -179,18 +184,20 @@ fn gen_id_args(endian: Option<&syn::LitStr>, bits: Option<usize>) -> syn::Result

/// Generate argument for fields:
///
/// `#deku(endian = "big", bits = "1", ctx = "a")` -> `Endian::Big, BitSize(1), a`
/// `#deku(endian = "big", bits = "1", ctx = "a")` -> `Endian::Big, Size::Bits(1), a`
fn gen_field_args(
endian: Option<&syn::LitStr>,
bits: Option<usize>,
bytes: Option<usize>,
ctx: Option<&Punctuated<syn::Expr, syn::token::Comma>>,
) -> syn::Result<TokenStream> {
let endian = endian.map(gen_endian_from_str).transpose()?;
let bits = bits.map(|n| quote! {deku::ctx::BitSize(#n)});
let bits = bits.map(|n| quote! {deku::ctx::Size::Bits(#n)});
let bytes = bytes.map(|n| quote! {deku::ctx::Size::Bytes(#n)});
let ctx = ctx.map(|c| quote! {#c});

// FIXME: Should be `into_iter` here, see /~https://github.com/rust-lang/rust/issues/66145.
let field_args = [endian.as_ref(), bits.as_ref(), ctx.as_ref()]
let field_args = [endian.as_ref(), bits.as_ref(), bytes.as_ref(), ctx.as_ref()]
.iter()
.filter_map(|i| *i)
.collect::<Vec<_>>();
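Following the doc comments above, an attribute such as `#[deku(endian = "big", bytes = "2", ctx = "a")]` should now expand to `Endian::Big, Size::Bytes(2), a`. Below is a standalone, simplified sketch of the argument-building pattern (hypothetical helper name; endian/ctx handling and the validation step omitted), not the derive crate's actual code:

```rust
use proc_macro2::TokenStream;
use quote::quote;

// Simplified stand-in for the gen_*_args helpers above: build the optional
// `Size::Bits` / `Size::Bytes` pieces, keep the `Some` values, and join them
// into a comma-separated argument list.
fn build_size_args(bits: Option<usize>, bytes: Option<usize>) -> TokenStream {
    let bits = bits.map(|n| quote! { deku::ctx::Size::Bits(#n) });
    let bytes = bytes.map(|n| quote! { deku::ctx::Size::Bytes(#n) });

    let args: Vec<_> = [bits.as_ref(), bytes.as_ref()]
        .iter()
        .filter_map(|i| *i)
        .collect();

    quote! { #(#args),* }
}
```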
10 changes: 5 additions & 5 deletions examples/custom_reader_and_writer.rs
@@ -1,11 +1,11 @@
use deku::ctx::BitSize;
use deku::ctx::Size;
use deku::prelude::*;
use std::convert::TryInto;

fn bit_flipper_read(
field_a: u8,
rest: &BitSlice<Msb0, u8>,
bit_size: BitSize,
bit_size: Size,
) -> Result<(&BitSlice<Msb0, u8>, u8), DekuError> {
// Access to previously read fields
println!("field_a = 0x{:X}", field_a);
@@ -29,7 +29,7 @@ fn bit_flipper_write(
field_a: u8,
field_b: u8,
output: &mut BitVec<Msb0, u8>,
bit_size: BitSize,
bit_size: Size,
) -> Result<(), DekuError> {
// Access to previously written fields
println!("field_a = 0x{:X}", field_a);
@@ -51,8 +51,8 @@ struct DekuTest {
field_a: u8,

#[deku(
reader = "bit_flipper_read(*field_a, rest, BitSize(8))",
writer = "bit_flipper_write(*field_a, *field_b, output, BitSize(8))"
reader = "bit_flipper_read(*field_a, rest, Size::Bits(8))",
writer = "bit_flipper_write(*field_a, *field_b, output, Size::Bits(8))"
)]
field_b: u8,
}
4 changes: 2 additions & 2 deletions src/attributes.rs
@@ -631,7 +631,7 @@ struct Type1 {
// is equivalent to

struct Type1 {
#[deku(ctx = "Endian::Big, BitSize(1)")]
#[deku(ctx = "Endian::Big, Size::Bits(1)")]
field: u8,
}
```
@@ -650,7 +650,7 @@ struct Type1 {
struct Type1 {
#[deku(ctx = "Endian::Big")]
field_a: u16,
#[deku(ctx = "Endian::Big, BitSize(5), *field_a")] // endian is prepended
#[deku(ctx = "Endian::Big, Size::Bits(5), *field_a")] // endian is prepended
field_b: SubType,
}
```
96 changes: 52 additions & 44 deletions src/ctx.rs
@@ -1,10 +1,13 @@
//! Types for context representation
//! See [ctx attribute](super::attributes#ctx) for more information.

use crate::error::DekuError;
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::str::FromStr;

#[cfg(feature = "alloc")]
use alloc::format;

/// An endian
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Endian {
@@ -83,7 +86,7 @@ pub enum Limit<T, Predicate: FnMut(&T) -> bool> {
Until(Predicate, PhantomData<T>),

/// Read until a given quantity of bits have been read
Bits(BitSize),
Size(Size),
}

impl<T> From<usize> for Limit<T, fn(&T) -> bool> {
@@ -98,9 +101,9 @@ impl<T, Predicate: for<'a> FnMut(&'a T) -> bool> From<Predicate> for Limit<T, Pr
}
}

impl<T> From<BitSize> for Limit<T, fn(&T) -> bool> {
fn from(bits: BitSize) -> Self {
Limit::Bits(bits)
impl<T> From<Size> for Limit<T, fn(&T) -> bool> {
fn from(size: Size) -> Self {
Limit::Size(size)
}
}

@@ -120,72 +123,77 @@ impl<T> Limit<T, fn(&T) -> bool> {
}

/// Constructs a new Limit that reads until the given number of bits have been read
pub fn new_bits(bits: BitSize) -> Self {
pub fn new_bits(bits: Size) -> Self {
bits.into()
}
}

/// The number bits in a field
/// The size of a field
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct BitSize(pub usize);
pub enum Size {
/// bit size
Bits(usize),
/// byte size
Bytes(usize),
}

impl BitSize {
impl Size {
/// Convert the size in bytes to a bit size.
/// # Examples
/// ```rust
/// # use std::mem::size_of;
/// # use deku::ctx::BitSize;
///
/// assert_eq!(BitSize::with_byte_size(1), BitSize(8));
/// ```
///
/// # Panic
/// Panic if `byte_size * 8` is greater than `usize::MAX`.
pub fn with_byte_size(byte_size: usize) -> Self {
Self(byte_size.checked_mul(8).expect("bit size overflow"))
fn bits_from_bytes(byte_size: usize) -> Self {
Self::Bits(byte_size.checked_mul(8).expect("bit size overflow"))
}

/// Returns the bit size of a type.
/// # Examples
/// ```rust
/// # use deku::ctx::BitSize;
/// # use deku::ctx::Size;
///
/// assert_eq!(BitSize::of::<i32>(), BitSize(4 * 8));
/// assert_eq!(Size::of::<i32>(), Size::Bits(4 * 8));
/// ```
///
/// # Panics
/// Panic if the bit size of given type is greater than `usize::MAX`
pub fn of<T>() -> Self {
Self::with_byte_size(core::mem::size_of::<T>())
Self::bits_from_bytes(core::mem::size_of::<T>())
}

/// Returns the bit size of the pointed-to value
pub fn of_val<T: ?Sized>(val: &T) -> Self {
Self::with_byte_size(core::mem::size_of_val(val))
}
}

impl Into<usize> for BitSize {
fn into(self) -> usize {
self.0
Self::bits_from_bytes(core::mem::size_of_val(val))
}
}

impl From<usize> for BitSize {
fn from(n: usize) -> Self {
Self(n)
}
}

impl Deref for BitSize {
type Target = usize;

fn deref(&self) -> &Self::Target {
&self.0
/// Returns the size in bits of a Size
///
/// # Panics
/// Panic if the bit size of Size::Bytes(n) is greater than `usize::MAX`
pub fn bit_size(&self) -> usize {
match *self {
Size::Bits(size) => size,
Size::Bytes(size) => size.checked_mul(8).expect("bit size overflow"),
}
}
}

impl DerefMut for BitSize {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
/// Returns the size in bytes of a Size
///
/// # Panics
/// Panic if the bit size of Size::Bytes(n) is greater than `usize::MAX`
pub fn byte_size(&self) -> Result<usize, DekuError> {
match *self {
Size::Bits(size) => {
if size % 8 == 0 {
Ok(size / 8)
} else {
Err(DekuError::InvalidParam(format!(
"Bit size of {} is not a multiple of 8.
Cannot be represented in bytes",
size
)))
}
}
Size::Bytes(size) => Ok(size),
}
}
}
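To make the new `Size` type concrete, here is a small usage sketch based on the methods added above (`of`, `bit_size`, `byte_size`); the error case mirrors the byte-alignment check in `byte_size`:

```rust
use deku::ctx::Size;
use deku::prelude::*;

fn main() -> Result<(), DekuError> {
    // Size::of::<T>() reports a type's width in bits, per the doc example above.
    assert_eq!(Size::of::<i32>(), Size::Bits(32));

    // bit_size() normalizes either variant to a bit count.
    assert_eq!(Size::Bits(12).bit_size(), 12);
    assert_eq!(Size::Bytes(2).bit_size(), 16);

    // byte_size() only succeeds when the bit count is a multiple of 8.
    assert_eq!(Size::Bits(16).byte_size()?, 2);
    assert!(Size::Bits(12).byte_size().is_err());

    Ok(())
}
```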