feat: Remove MAX_DATA_LIMIT (#780)
We used to limit the maximum amount of data that can be transferred,
both the total size and the size of any single blob.  There is no real
benefit to this: the data is passed through the reader rather than
buffered all at once, and keeping the user's disk from filling up is
not our concern.
flub authored Feb 22, 2023
1 parent c28a378 commit 42a6235
Showing 1 changed file with 1 addition and 14 deletions.
src/get.rs (1 addition, 14 deletions)

@@ -15,7 +15,7 @@ use crate::protocol::{
 };
 use crate::tls::{self, Keypair, PeerId};
 use abao::decode::AsyncSliceDecoder;
-use anyhow::{anyhow, bail, ensure, Result};
+use anyhow::{anyhow, bail, Result};
 use bytes::BytesMut;
 use futures::Future;
 use postcard::experimental::max_size::MaxSize;
@@ -24,8 +24,6 @@ use tracing::{debug, error};
 
 pub use crate::util::Hash;
 
-const MAX_DATA_SIZE: u64 = 1024 * 1024 * 1024;
-
 /// Options for the client
 #[derive(Clone, Debug)]
 pub struct Options {
@@ -181,13 +179,6 @@ where
         match response.data {
             // server is sending over a collection of blobs
             Res::FoundCollection { total_blobs_size } => {
-                ensure!(
-                    total_blobs_size <= MAX_DATA_SIZE,
-                    "size too large: {} > {}",
-                    total_blobs_size,
-                    MAX_DATA_SIZE
-                );
-
                 data_len = total_blobs_size;
 
                 // read entire collection data into buffer
@@ -204,10 +195,6 @@
                     handle_blob_response(blob.hash, reader, &mut in_buffer).await?;
 
                     let size = blob_reader.read_size().await?;
-                    anyhow::ensure!(
-                        size <= MAX_DATA_SIZE,
-                        "size too large: {size} > {MAX_DATA_SIZE}"
-                    );
                     anyhow::ensure!(
                         size <= remaining_size,
                         "downloaded more than {total_blobs_size}"