Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
99 changes: 99 additions & 0 deletions packages/storage-r2/src/client/R2ClientUploadHandler.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
'use client'

import { createClientUploadHandler } from '@payloadcms/plugin-cloud-storage/client'
import { toast as PayloadToast } from '@payloadcms/ui'

import type {
R2MultipartUpload,
R2StorageClientUploadContext,
R2StorageClientUploadHandlerParams,
R2StorageMultipartUploadHandlerParams,
R2UploadedPart,
} from '../types.js'

const R2ClientUploadHandler = createClientUploadHandler<R2StorageClientUploadHandlerParams>({
  /**
   * Uploads a file directly from the browser to R2 via the plugin's
   * multipart endpoint, chunking the file into `chunkSize` parts and
   * reporting progress through the current Payload toast.
   *
   * Returns `{ key }` on success, or `undefined` when any step fails
   * (the failure is surfaced to the user via an error toast).
   */
  handler: async ({
    apiRoute,
    collectionSlug,
    // NOTE(review): `prefix` and `updateFilename` are currently unused here —
    // the server handler derives the prefix from the collection config. Kept
    // for interface compatibility with extraClientHandlerProps.
    extra: { chunkSize = 5 * 1024 * 1024, prefix = '' },
    file,
    serverHandlerPath,
    serverURL,
    updateFilename,
  }): Promise<R2StorageClientUploadContext | undefined> => {
    const bytesTotal = file.size

    // Query params sent with every request; mutated between calls to carry
    // the multipart state (id / key / part number) to the server handler.
    const params: R2StorageMultipartUploadHandlerParams = {
      collection: collectionSlug,
      fileName: file.name,
      fileType: file.type,
    }
    // Declared after `params` so the closure reads an already-initialized binding.
    const endpoint = () =>
      `${serverURL}${apiRoute}${serverHandlerPath}?${new URLSearchParams(params)}`

    // Reuse the most recent toast (the one Payload shows for this upload) so
    // progress updates replace it instead of stacking new toasts.
    const toastHistory = PayloadToast.getHistory()
    const toast = toastHistory[toastHistory.length - 1]?.id
    // Human-readable byte count, scaled by the TOTAL size so both sides of
    // "uploaded / total" use the same unit.
    const toastFormat = (bytes: number) =>
      bytesTotal > 1_000_000_000
        ? `${(bytes / 1_000_000_000).toFixed(1)} GB`
        : bytesTotal > 1_000_000
          ? `${(bytes / 1_000_000).toFixed(1)} MB`
          : bytesTotal > 1_000
            ? `${(bytes / 1_000).toFixed(0)} KB`
            : `${bytes} bytes`

    try {
      // 1. Create the multipart upload (no multipartId/key in params yet).
      const multipart = await fetch(endpoint(), { method: 'POST' })
      if (!multipart.ok) {
        throw new Error('Failed to initialize multipart upload')
      }

      const multipartUpload = (await multipart.json()) as Pick<
        R2MultipartUpload,
        'key' | 'uploadId'
      >
      const multipartUploadedParts: R2UploadedPart[] = []

      params.multipartId = multipartUpload.uploadId
      params.multipartKey = multipartUpload.key

      // Always upload at least one part: a zero-byte file would otherwise
      // skip the loop entirely, silently returning undefined and orphaning
      // the multipart session created above.
      const partTotal = Math.max(1, Math.ceil(bytesTotal / chunkSize))

      for (let part = 1; part <= partTotal; part++) {
        const bytesEnd = Math.min(part * chunkSize, bytesTotal)
        const bytesStart = (part - 1) * chunkSize
        // Guard the divide so a zero-byte file reports 0.00 instead of NaN.
        const bytesPercentage =
          bytesTotal > 0 ? ((bytesStart * 100) / bytesTotal).toFixed(2) : '0.00'

        params.multipartNumber = String(part)

        PayloadToast.loading(
          `Uploading... ${toastFormat(bytesStart)} / ${toastFormat(bytesTotal)} (${bytesPercentage}%)`,
          { id: toast },
        )

        // 2. Upload this chunk. The raw bytes are the request body.
        const body = file.slice(bytesStart, bytesEnd)
        const headers = {
          'Content-Length': String(body.size),
          'Content-Type': 'application/octet-stream',
        }
        const uploaded = await fetch(endpoint(), { body, headers, method: 'POST' })
        if (!uploaded.ok) {
          throw new Error(`Failed to upload part ${part} / ${partTotal}`)
        }

        multipartUploadedParts.push((await uploaded.json()) as R2UploadedPart)

        if (part === partTotal) {
          // 3. Complete the upload: the absence of multipartNumber signals
          // "complete" to the server, which expects the part list as JSON.
          delete params.multipartNumber

          const body = JSON.stringify(multipartUploadedParts)
          const headers = { 'Content-Type': 'application/json' }
          const complete = await fetch(endpoint(), { body, headers, method: 'POST' })
          if (!complete.ok) {
            throw new Error(`Failed to complete multipart upload`)
          }

          PayloadToast.success('Upload complete!', { id: toast })
          return { key: await complete.text() }
        }
      }
    } catch (e) {
      const error = e as Error

      console.error('Upload failed', error)
      PayloadToast.error('Upload failed. Error: ' + error.message, { id: toast })
    }
  },
})

export default R2ClientUploadHandler
72 changes: 72 additions & 0 deletions packages/storage-r2/src/handleMultiPartUpload.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import type { ClientUploadsAccess } from '@payloadcms/plugin-cloud-storage/types'
import type { PayloadHandler } from 'payload'

import path from 'path'
import { APIError, Forbidden } from 'payload'

import type { R2StorageOptions } from './index.js'
import type { R2Bucket, R2StorageMultipartUploadHandlerParams, R2UploadedPart } from './types.js'

type Args = {
  /** Access control for the multipart endpoint; defaults to "any logged-in user". */
  access?: ClientUploadsAccess
  /** R2 bucket binding the parts are written to. */
  bucket: R2Bucket
  /** Collection options from the plugin config, keyed by collection slug. */
  collections: R2StorageOptions['collections']
}

// Default access check: allow only authenticated requests.
const defaultAccess: Args['access'] = ({ req }) => !!req.user

// Adapted from https://developers.cloudflare.com/r2/api/workers/workers-multipart-usage/
export const getHandleMultiPartUpload =
({ access = defaultAccess, bucket, collections }: Args): PayloadHandler =>
async (req) => {
const params = Object.fromEntries(req.searchParams) as R2StorageMultipartUploadHandlerParams
const collectionSlug = params.collection
const filename = params.fileName
const filetype = params.fileType

const collectionConfig = collections[collectionSlug]
if (!collectionConfig) {
throw new APIError(`Collection ${collectionSlug} was not found in R2 Storage options`)
}

if (!(await access({ collectionSlug, req }))) {
throw new Forbidden()
}

const prefix = (typeof collectionConfig === 'object' && collectionConfig.prefix) || ''
const fileKey = path.posix.join(prefix, filename)

const multipartId = params.multipartId
const multipartKey = params.multipartKey
const multipartNumber = parseInt(params.multipartNumber || '')

if (multipartId && multipartKey) {
const multipartUpload = bucket.resumeMultipartUpload(multipartKey, multipartId)
const request = req as Request

if (isNaN(multipartNumber)) {
// Upload complete
const object = await multipartUpload.complete((await request.json()) as any)
return new Response(object.key, { status: 200 })
} else {
// Upload part
const uploadedPart = await multipartUpload.uploadPart(
multipartNumber,
await request.arrayBuffer(),
)
return Response.json(uploadedPart)
}
} else {
// Create multipart upload
const multipartUpload = await bucket.createMultipartUpload(fileKey, {
httpMetadata: {
contentType: filetype,
},
})

return Response.json({
key: multipartUpload.key,
uploadId: multipartUpload.uploadId,
})
}
}
16 changes: 8 additions & 8 deletions packages/storage-r2/src/handleUpload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ interface Args {
}

export const getHandleUpload = ({ bucket, prefix = '' }: Args): HandleUpload => {
return async ({ data, file }) => {
// Read more: https://github.com/cloudflare/workers-sdk/issues/6047#issuecomment-2691217843
const buffer = process.env.NODE_ENV === 'development' ? new Blob([file.buffer]) : file.buffer
await bucket.put(path.posix.join(data.prefix || prefix, file.filename), buffer, {
httpMetadata: { contentType: file.mimeType },
})

return data
return async ({ clientUploadContext, data, file }) => {
if (clientUploadContext == undefined) {
// Read more: https://github.com/cloudflare/workers-sdk/issues/6047#issuecomment-2691217843
const buffer = process.env.NODE_ENV === 'development' ? new Blob([file.buffer]) : file.buffer
await bucket.put(path.posix.join(data.prefix || prefix, file.filename), buffer, {
httpMetadata: { contentType: file.mimeType },
})
}
}
}
35 changes: 33 additions & 2 deletions packages/storage-r2/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,21 +1,28 @@
import type {
Adapter,
ClientUploadsConfig,
PluginOptions as CloudStoragePluginOptions,
CollectionOptions,
GeneratedAdapter,
} from '@payloadcms/plugin-cloud-storage/types'
import type { Config, Plugin, UploadCollectionSlug } from 'payload'

import { cloudStoragePlugin } from '@payloadcms/plugin-cloud-storage'
import { initClientUploads } from '@payloadcms/plugin-cloud-storage/utilities'

import type { R2Bucket } from './types.js'
import type { R2Bucket, R2StorageClientUploadHandlerParams } from './types.js'

import { getHandleDelete } from './handleDelete.js'
import { getHandleMultiPartUpload } from './handleMultiPartUpload.js'
import { getHandleUpload } from './handleUpload.js'
import { getHandler } from './staticHandler.js'

export interface R2StorageOptions {
bucket: R2Bucket
/**
* Do uploads directly on the client, to bypass limits on Cloudflare/Vercel.
*/
clientUploads?: ClientUploadsConfig
/**
* Collection options to apply the R2 adapter to.
*/
Expand All @@ -32,6 +39,29 @@ export const r2Storage: R2StoragePlugin =

const isPluginDisabled = r2StorageOptions.enabled === false

initClientUploads<
R2StorageClientUploadHandlerParams,
R2StorageOptions['collections'][keyof R2StorageOptions['collections']]
>({
clientHandler: '@payloadcms/storage-r2/client/R2ClientUploadHandler',
collections: r2StorageOptions.collections,
config: incomingConfig,
enabled: !isPluginDisabled && Boolean(r2StorageOptions.clientUploads),
extraClientHandlerProps: (collection) => ({
prefix:
(typeof collection === 'object' && collection.prefix && `${collection.prefix}/`) || '',
}),
serverHandler: getHandleMultiPartUpload({
access:
typeof r2StorageOptions.clientUploads === 'object'
? r2StorageOptions.clientUploads.access
: undefined,
bucket: r2StorageOptions.bucket,
collections: r2StorageOptions.collections,
}),
serverHandlerPath: '/storage-r2-multi-part-upload',
})

if (isPluginDisabled) {
return incomingConfig
}
Expand Down Expand Up @@ -73,10 +103,11 @@ export const r2Storage: R2StoragePlugin =
})(config)
}

function r2StorageInternal({ bucket }: R2StorageOptions): Adapter {
function r2StorageInternal({ bucket, clientUploads }: R2StorageOptions): Adapter {
return ({ collection, prefix }): GeneratedAdapter => {
return {
name: 'r2',
clientUploads,
handleDelete: getHandleDelete({ bucket }),
handleUpload: getHandleUpload({
bucket,
Expand Down
31 changes: 27 additions & 4 deletions packages/storage-r2/src/staticHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import type { CollectionConfig } from 'payload'

import path from 'path'

import type { R2Bucket } from './types.js'
import type { R2Bucket, R2ObjectBody } from './types.js'

interface Args {
bucket: R2Bucket
Expand All @@ -14,18 +14,41 @@ interface Args {
const isMiniflare = process.env.NODE_ENV === 'development'

export const getHandler = ({ bucket, prefix = '' }: Args): StaticHandler => {
return async (req, { params: { filename } }) => {
return async (req, { params: { clientUploadContext, filename } }) => {
// Due to https://github.com/cloudflare/workers-sdk/issues/6047
// We cannot send a Headers instance to Miniflare
const obj = await bucket?.get(path.posix.join(prefix, filename), {
const obj: R2ObjectBody = await bucket?.get(path.posix.join(prefix, filename), {
range: isMiniflare ? undefined : req.headers,
})

if (obj?.body == undefined) {
return new Response(null, { status: 404 })
}
// Don't return large file uploads back to the client, or the Worker will run out of memory
if (obj?.size > 50 * 1024 * 1024 && clientUploadContext) {
return new Response(null, { status: 200 })
}

const headers = new Headers()
if (!isMiniflare) {
const metadata = obj.httpMetadata

if (isMiniflare) {
if (metadata?.cacheControl) {
headers.set('Cache-Control', metadata.cacheControl)
}
if (metadata?.contentDisposition) {
headers.set('Content-Disposition', metadata.contentDisposition)
}
if (metadata?.contentEncoding) {
headers.set('Content-Encoding', metadata.contentEncoding)
}
if (metadata?.contentLanguage) {
headers.set('Content-Language', metadata.contentLanguage)
}
if (metadata?.contentType) {
headers.set('Content-Type', metadata.contentType)
}
} else {
obj.writeHttpMetadata(headers)
}

Expand Down
59 changes: 57 additions & 2 deletions packages/storage-r2/src/types.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
export interface R2Bucket {
createMultipartUpload(key: string, options?: any): Promise<any>
createMultipartUpload(key: string, options?: any): Promise<R2MultipartUpload>
delete(keys: string | string[]): Promise<void>
get(
key: string,
Expand All @@ -23,5 +23,60 @@ export interface R2Bucket {
value: ArrayBuffer | ArrayBufferView | Blob | null | ReadableStream | string,
options?: any,
): Promise<any>
resumeMultipartUpload(key: string, uploadId: string): any
resumeMultipartUpload(key: string, uploadId: string): R2MultipartUpload
}

/** Subset of R2's HTTP metadata that can be stored alongside an object. */
interface R2HTTPMetadata {
  cacheControl?: string
  cacheExpiry?: Date
  contentDisposition?: string
  contentEncoding?: string
  contentLanguage?: string
  contentType?: string
}

/** Minimal descriptor of a stored R2 object, as returned by bucket operations. */
export interface R2Object {
  readonly etag: string
  readonly httpMetadata?: R2HTTPMetadata
  readonly key: string
  /** Object size in bytes. */
  readonly size: number

  /** Copies the object's stored HTTP metadata onto the given Headers instance. */
  writeHttpMetadata(headers: Headers): void
}
/** An R2 object whose body stream is available (e.g. the result of a get). */
export interface R2ObjectBody extends R2Object {
  get body(): ReadableStream
}

/**
 * Handle for an in-progress R2 multipart upload (freshly created or resumed
 * from a key + uploadId). Mirrors the Workers runtime interface.
 */
export interface R2MultipartUpload {
  /** Aborts the upload, discarding any parts uploaded so far. */
  abort(): Promise<void>
  /** Finalizes the upload from the given uploaded parts. */
  complete(uploadedParts: R2UploadedPart[]): Promise<R2Object>
  readonly key: string
  readonly uploadId: string
  /** Uploads a single part; part numbers start at 1. */
  uploadPart(
    partNumber: number,
    value: (ArrayBuffer | ArrayBufferView) | Blob | ReadableStream | string,
    options?: any,
  ): Promise<R2UploadedPart>
}

/** Context returned by the client upload handler once the upload completes. */
export interface R2StorageClientUploadContext {
  /** Final object key in the bucket. */
  key: string
}
/** Extra props the server passes to the client upload handler. */
export type R2StorageClientUploadHandlerParams = {
  /** Part size in bytes; the client defaults this to 5 MiB. */
  chunkSize?: number
  prefix: string
}

/** Query params the client sends to the multipart upload endpoint. */
export type R2StorageMultipartUploadHandlerParams = {
  collection: string
  fileName: string
  fileType: string
  /** Set once the multipart upload has been created on the server. */
  multipartId?: string
  multipartKey?: string
  /** 1-based part number as a string; absent on the final "complete" request. */
  multipartNumber?: string
}

/** Descriptor R2 returns for each uploaded part; fed back to complete(). */
export interface R2UploadedPart {
  etag: string
  partNumber: number
}
Loading