feat: add custom max chunk size to client put and putCar options (#1000)
Co-authored-by: Alan Shaw <alan.shaw@protocol.ai>
Co-authored-by: Vasco Santos <santos.vasco10@gmail.com>
3 people committed Mar 11, 2022
1 parent 051f12d commit d525324
Showing 3 changed files with 71 additions and 4 deletions.
18 changes: 14 additions & 4 deletions packages/client/src/lib.js
@@ -30,7 +30,9 @@ import {
 
 const MAX_PUT_RETRIES = 5
 const MAX_CONCURRENT_UPLOADS = 3
-const MAX_CHUNK_SIZE = 1024 * 1024 * 10 // chunk to ~10MB CARs
+const DEFAULT_CHUNK_SIZE = 1024 * 1024 * 10 // chunk to ~10MiB CARs
+const MAX_BLOCK_SIZE = 1048576 // 1 MiB
+const MAX_CHUNK_SIZE = 104857600 // 100 MiB
 
 /** @typedef { import('./lib/interface.js').API } API */
 /** @typedef { import('./lib/interface.js').Status} Status */
@@ -97,9 +99,13 @@ class Web3Storage {
     onRootCidReady,
     onStoredChunk,
     maxRetries = MAX_PUT_RETRIES,
+    maxChunkSize = DEFAULT_CHUNK_SIZE,
     wrapWithDirectory = true,
     name
   } = {}) {
+    if (maxChunkSize >= MAX_CHUNK_SIZE || maxChunkSize < MAX_BLOCK_SIZE) {
+      throw new Error('maximum chunk size must be less than 100MiB and greater than or equal to 1MiB')
+    }
     const blockstore = new Blockstore()
     try {
       const { out, root } = await pack({
@@ -109,12 +115,12 @@ class Web3Storage {
         })),
         blockstore,
         wrapWithDirectory,
-        maxChunkSize: 1048576,
+        maxChunkSize: MAX_BLOCK_SIZE,
         maxChildrenPerNode: 1024
       })
       onRootCidReady && onRootCidReady(root.toString())
       const car = await CarReader.fromIterable(out)
-      return await Web3Storage.putCar({ endpoint, token }, car, { onStoredChunk, maxRetries, name })
+      return await Web3Storage.putCar({ endpoint, token }, car, { onStoredChunk, maxRetries, maxChunkSize, name })
     } finally {
       await blockstore.close()
     }
@@ -130,9 +136,13 @@ class Web3Storage {
     name,
     onStoredChunk,
     maxRetries = MAX_PUT_RETRIES,
+    maxChunkSize = DEFAULT_CHUNK_SIZE,
     decoders
   } = {}) {
-    const targetSize = MAX_CHUNK_SIZE
+    if (maxChunkSize >= MAX_CHUNK_SIZE || maxChunkSize < MAX_BLOCK_SIZE) {
+      throw new Error('maximum chunk size must be less than 100MiB and greater than or equal to 1MiB')
+    }
+    const targetSize = maxChunkSize
     const url = new URL('car', endpoint)
     let headers = Web3Storage.headers(token)
 
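For orientation, here is a minimal usage sketch of the new put option. It is not part of the commit; the token value and file contents are placeholders.

import { Web3Storage, File } from 'web3.storage'

// Placeholder token; substitute a real API token.
const client = new Web3Storage({ token: 'YOUR_API_TOKEN' })

// Upload in ~5 MiB CAR chunks instead of the 10 MiB default.
// Values outside [1 MiB, 100 MiB) now throw before any upload starts.
const files = [new File(['hello world'], 'hello.txt')]
const cid = await client.put(files, {
  maxChunkSize: 1024 * 1024 * 5,
  onStoredChunk: (size) => console.log(`stored chunk of ${size} bytes`)
})
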
8 changes: 8 additions & 0 deletions packages/client/src/lib/interface.ts
@@ -101,6 +101,10 @@ export type PutOptions = {
    * Maximum times to retry a failed upload. Default: 5
    */
   maxRetries?: number
+  /**
+   * Maximum chunk size to upload in bytes. Default: 10,485,760
+   */
+  maxChunkSize?: number
   /**
    * Should input files be wrapped with a directory? Default: true
    *
@@ -138,6 +142,10 @@ export type PutCarOptions = {
    * Maximum times to retry a failed upload. Default: 5
    */
   maxRetries?: number
+  /**
+   * Maximum chunk size to upload in bytes. Default: 10,485,760
+   */
+  maxChunkSize?: number
   /**
    * Additional IPLD block decoders. Used to interpret the data in the CAR file
    * and split it into multiple chunks. Note these are only required if the CAR
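A corresponding sketch for putCar, reusing the client from the sketch above. The upload name and chunk size are illustrative, and `car` is assumed to be a CarReader built elsewhere (e.g. like the tests' createCar helper below).

// Sketch only: `car` is an existing CarReader instance.
const carCid = await client.putCar(car, {
  name: 'custom-chunked-car',      // illustrative upload name
  maxChunkSize: 1024 * 1024 * 50   // split into ~50 MiB upload chunks
})
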
49 changes: 49 additions & 0 deletions packages/client/test/put.spec.js
@@ -58,6 +58,16 @@ describe('put', () => {
     }
   })
 
+  it('errors with wrong max chunk size', async () => {
+    const client = new Web3Storage({ endpoint, token })
+    try {
+      await client.put([], { maxChunkSize: 10 })
+      assert.unreachable('should have thrown')
+    } catch (err) {
+      assert.match(err.message, /maximum chunk size must be less than 100MiB and greater than or equal to 1MiB/)
+    }
+  })
+
   it('adds files', async () => {
     const client = new Web3Storage({ token, endpoint })
     const files = prepareFiles()
@@ -85,6 +95,20 @@ describe('put', () => {
     assert.equal(cid, expectedCid, 'returned cid matches the CAR')
   })
 
+  it('adds files {maxChunkSize: custom-size}', async () => {
+    const client = new Web3Storage({ token, endpoint })
+    const files = prepareFiles()
+    const expectedCid = 'bafybeiep3t2chy6e3dxk3fktnshm7tpopjrns6wevo4uwpnnz5aq352se4'
+    const cid = await client.put(files, {
+      name: 'web3-storage-dir-with-custom-max-chunk-size',
+      maxChunkSize: 1024 * 1024 * 5,
+      onRootCidReady: (cid) => {
+        assert.equal(cid, expectedCid, 'returned cid matches the CAR')
+      }
+    })
+    assert.equal(cid, expectedCid, 'returned cid matches the CAR')
+  })
+
   it('adds big files', async function () {
     this.timeout(60e3)
     const client = new Web3Storage({ token, endpoint })
@@ -140,6 +164,20 @@ describe('putCar', () => {
     assert.equal(cid, expectedCid, 'returned cid matches the CAR')
   })
 
+  it('adds CAR files {maxChunkSize: custom-size}', async () => {
+    const client = new Web3Storage({ token, endpoint })
+    const carReader = await createCar('hello world')
+    const expectedCid = 'bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354'
+    const cid = await client.putCar(carReader, {
+      name: 'putCar test',
+      maxChunkSize: 1024 * 1024 * 5,
+      onRootCidReady: cid => {
+        assert.equal(cid, expectedCid, 'returned cid matches the CAR')
+      }
+    })
+    assert.equal(cid, expectedCid, 'returned cid matches the CAR')
+  })
+
   it('errors for CAR with zero roots', async () => {
     const client = new Web3Storage({ token, endpoint })
     const { writer, out } = CarWriter.create([])
@@ -169,6 +207,17 @@ describe('putCar', () => {
     }
   })
 
+  it('errors for CAR with wrong max chunk size', async () => {
+    const client = new Web3Storage({ token, endpoint })
+    const carReader = await createCar('hello world')
+    try {
+      await client.putCar(carReader, { maxChunkSize: 10 })
+      assert.unreachable('should have thrown')
+    } catch (err) {
+      assert.match(err.message, /maximum chunk size must be less than 100MiB and greater than or equal to 1MiB/)
+    }
+  })
+
   it('put CAR with non-default decoder', async () => {
     const client = new Web3Storage({ token, endpoint })
     const block = await encode({ value: { hello: 'world' }, codec: json, hasher: sha256 })
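Because the `targetSize = maxChunkSize` change splits the CAR at the requested size, the number of upload requests scales inversely with maxChunkSize. A quick way to observe that, reusing the client and files from the first sketch (values are illustrative):

// Count upload requests for a given chunk size (illustrative).
let chunks = 0
await client.put(files, {
  maxChunkSize: 1024 * 1024 * 2, // 2 MiB chunks
  onStoredChunk: () => { chunks++ }
})
console.log(`upload completed in ${chunks} chunk(s)`)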
