From f1f18347ddfff509a58acea2a815c40fe86fd56c Mon Sep 17 00:00:00 2001
From: Greg Brimble
Date: Mon, 10 Jun 2024 19:42:50 +0100
Subject: [PATCH] Revert #5632 (#6002)

---
 .changeset/fluffy-bees-shout.md        |  5 +++++
 packages/wrangler/src/pages/upload.tsx | 27 ++++++++++-----------------
 2 files changed, 15 insertions(+), 17 deletions(-)
 create mode 100644 .changeset/fluffy-bees-shout.md

diff --git a/.changeset/fluffy-bees-shout.md b/.changeset/fluffy-bees-shout.md
new file mode 100644
index 000000000000..1ea3858d1766
--- /dev/null
+++ b/.changeset/fluffy-bees-shout.md
@@ -0,0 +1,5 @@
+---
+"wrangler": patch
+---
+
+Revert a change in 3.60.0 which incorrectly batched assets for Pages uploads (/~https://github.com/cloudflare/workers-sdk/pull/5632).
diff --git a/packages/wrangler/src/pages/upload.tsx b/packages/wrangler/src/pages/upload.tsx
index a5a3f631412d..e5394b4365a8 100644
--- a/packages/wrangler/src/pages/upload.tsx
+++ b/packages/wrangler/src/pages/upload.tsx
@@ -205,23 +205,16 @@ export const upload = async (
 	const doUpload = async (): Promise => {
 		// Populate the payload only when actually uploading (this is limited to 3 concurrent uploads at 50 MiB per bucket meaning we'd only load in a max of ~150 MiB)
 		// This is so we don't run out of memory trying to upload the files.
-		const payload: UploadPayloadFile[] = [];
-
-		for (let i = 0; i < bucket.files.length; i += 1000) {
-			// only read up to 1000 files, from disk, at a time to avoid `EMFILE` error (on Windows)
-			payload.push(
-				...(await Promise.all(
-					bucket.files.slice(i * 1000, (i + 1) * 1000).map(async (file) => ({
-						key: file.hash,
-						value: (await readFile(file.path)).toString("base64"),
-						metadata: {
-							contentType: file.contentType,
-						},
-						base64: true,
-					}))
-				))
-			);
-		}
+		const payload: UploadPayloadFile[] = await Promise.all(
+			bucket.files.map(async (file) => ({
+				key: file.hash,
+				value: (await readFile(file.path)).toString("base64"),
+				metadata: {
+					contentType: file.contentType,
+				},
+				base64: true,
+			}))
+		);
 
 		try {
 			logger.debug("POST /pages/assets/upload");
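
Note on the reverted batching (an inference from the removed code, not something stated in the patch itself): the changeset blames the reverted change for "incorrectly batched" asset uploads. A minimal TypeScript sketch of the reverted loop's slicing, using a hypothetical 2,500-file stand-in for bucket.files, shows how everything after the first 1000 files could fall outside the payload, since the loop counter already advances in steps of 1000 before being multiplied by 1000 again inside slice():

    // Hypothetical 2,500-entry stand-in for bucket.files
    const files = Array.from({ length: 2500 }, (_, n) => `file-${n}`);

    const batched: string[] = [];
    for (let i = 0; i < files.length; i += 1000) {
        // i = 0    -> slice(0, 1000)              -> 1000 entries
        // i = 1000 -> slice(1_000_000, 1_001_000) -> empty
        // i = 2000 -> slice(2_000_000, 2_001_000) -> empty
        batched.push(...files.slice(i * 1000, (i + 1) * 1000));
    }

    console.log(batched.length); // 1000 -- everything after the first batch is dropped

Slicing on the counter itself (files.slice(i, i + 1000)) would have produced the 1000-file batches the removed comment describes; the revert instead goes back to reading every file in the bucket in a single Promise.all.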