From f7b059db9bca336c981980a93a2d422d643d183f Mon Sep 17 00:00:00 2001
From: ctrlc03 <93448202+ctrlc03@users.noreply.github.com>
Date: Tue, 18 Jul 2023 18:02:28 +0100
Subject: [PATCH] feat(setup): add cloud function for file transfer and unit tests

---
 packages/actions/src/helpers/constants.ts  |  3 +-
 packages/actions/src/helpers/functions.ts  | 28 +++++++++
 packages/actions/src/helpers/utils.ts      |  4 +-
 packages/actions/src/index.ts              |  3 +-
 packages/actions/src/types/index.ts        |  1 +
 .../test/data/artifacts/ceremonySetup.json |  2 +-
 packages/actions/test/unit/storage.test.ts | 60 ++++++++++++++++++-
 packages/actions/test/utils/storage.ts     |  3 +-
 packages/backend/src/functions/index.ts    |  3 +-
 packages/backend/src/functions/storage.ts  | 58 +++++++++++++++++-
 packages/backend/src/types/index.ts        | 17 ++++++
 11 files changed, 173 insertions(+), 9 deletions(-)

diff --git a/packages/actions/src/helpers/constants.ts b/packages/actions/src/helpers/constants.ts
index 110bb83b..303c5f10 100644
--- a/packages/actions/src/helpers/constants.ts
+++ b/packages/actions/src/helpers/constants.ts
@@ -306,6 +306,7 @@ export const commonTerms = {
         checkAndPrepareCoordinatorForFinalization: "checkAndPrepareCoordinatorForFinalization",
         finalizeCircuit: "finalizeCircuit",
         finalizeCeremony: "finalizeCeremony",
-        downloadCircuitArtifacts: "downloadCircuitArtifacts"
+        downloadCircuitArtifacts: "downloadCircuitArtifacts",
+        transferObject: "transferObject",
     }
 }
diff --git a/packages/actions/src/helpers/functions.ts b/packages/actions/src/helpers/functions.ts
index cac1714c..cbf14924 100644
--- a/packages/actions/src/helpers/functions.ts
+++ b/packages/actions/src/helpers/functions.ts
@@ -436,3 +436,31 @@ export const finalizeCeremony = async (functions: Functions, ceremonyId: string)
         ceremonyId
     })
 }
+
+/**
+ * Transfer an object between two buckets.
+ * @param functions - the Firebase cloud functions object instance.
+ * @param originBucketName - the name of the origin bucket.
+ * @param originObjectKey - the key of the origin object.
+ * @param destinationBucketName - the name of the destination bucket.
+ * @param destinationObjectKey - the key of the destination object.
+ * @returns <Promise<boolean>> - true when the transfer is completed; otherwise false.
+ */
+export const transferObject = async (
+    functions: Functions,
+    originBucketName: string,
+    originObjectKey: string,
+    destinationBucketName: string,
+    destinationObjectKey: string
+): Promise<boolean> => {
+    const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.transferObject)
+
+    const { data: result }: any = await cf({
+        originBucketName,
+        originObjectKey,
+        destinationBucketName,
+        destinationObjectKey
+    })
+
+    return result
+}
\ No newline at end of file
diff --git a/packages/actions/src/helpers/utils.ts b/packages/actions/src/helpers/utils.ts
index 99f2b5c1..93ea1aba 100644
--- a/packages/actions/src/helpers/utils.ts
+++ b/packages/actions/src/helpers/utils.ts
@@ -93,7 +93,9 @@ export const parseCeremonyFile = async (path: string, cleanup: boolean = false):
         const localR1csPath = `./${circuitData.name}.r1cs`
 
         // check that the artifacts exist in S3
-        const s3 = new S3Client({region: 'us-east-1'})
+        // we don't need any privileges to download this
+        // just the correct region
+        const s3 = new S3Client({region: artifacts.region})
 
         try {
             await s3.send(new HeadObjectCommand({
diff --git a/packages/actions/src/index.ts b/packages/actions/src/index.ts
index d0792646..a63db2b8 100644
--- a/packages/actions/src/index.ts
+++ b/packages/actions/src/index.ts
@@ -87,7 +87,8 @@ export {
     verifyContribution,
     checkAndPrepareCoordinatorForFinalization,
     finalizeCircuit,
-    finalizeCeremony
+    finalizeCeremony,
+    transferObject
 } from "./helpers/functions"
 export { toHex, blake512FromPath, computeSHA256ToHex, compareHashes } from "./helpers/crypto"
 export {
diff --git a/packages/actions/src/types/index.ts b/packages/actions/src/types/index.ts
index dcaace6e..fa074552 100644
--- a/packages/actions/src/types/index.ts
+++ b/packages/actions/src/types/index.ts
@@ -624,6 +624,7 @@ export type SetupCeremonyData = {
 export type CeremonySetupTemplateCircuitArtifacts = {
     artifacts: {
         bucket: string
+        region: string
         r1csStoragePath: string
         wasmStoragePath: string
     }
diff --git a/packages/actions/test/data/artifacts/ceremonySetup.json b/packages/actions/test/data/artifacts/ceremonySetup.json
index db486d54..bf51fcd8 100644
--- a/packages/actions/test/data/artifacts/ceremonySetup.json
+++ b/packages/actions/test/data/artifacts/ceremonySetup.json
@@ -1,5 +1,5 @@
 {
-    "title": "Test dev ceremony",
+    "title": "Test dev 2 ceremony",
     "description": "This is an example ceremony",
     "startDate": "2023-08-07T00:00:00",
     "endDate": "2023-09-10T00:00:00",
diff --git a/packages/actions/test/unit/storage.test.ts b/packages/actions/test/unit/storage.test.ts
index f996754c..8747e12d 100644
--- a/packages/actions/test/unit/storage.test.ts
+++ b/packages/actions/test/unit/storage.test.ts
@@ -17,7 +17,9 @@ import {
     cleanUpMockUsers,
     sleep,
     cleanUpRecursively,
-    mockCeremoniesCleanup
+    mockCeremoniesCleanup,
+    generatePseudoRandomStringOfNumbers,
+    uploadFileToS3
 } from "../utils/index"
 import { fakeCeremoniesData, fakeCircuitsData, fakeUsersData } from "../data/samples"
 import {
@@ -39,7 +41,7 @@ import {
 import { TestingEnvironment } from "../../src/types/enums"
 import { ChunkWithUrl, ETagWithPartNumber } from "../../src/types/index"
 import { getChunksAndPreSignedUrls, getWasmStorageFilePath, uploadParts } from "../../src/helpers/storage"
-import { completeMultiPartUpload, openMultiPartUpload } from "../../src/helpers/functions"
+import { completeMultiPartUpload, openMultiPartUpload, transferObject } from "../../src/helpers/functions"
 
 chai.use(chaiAsPromised)
 
@@ -684,6 +686,60 @@ describe("Storage", () => {
         })
     })
 
+    describe("transferObject", () => {
+        // we need two buckets - source and destination
+        const sourceBucketName = generatePseudoRandomStringOfNumbers(10)
+        const destinationBucketName = generatePseudoRandomStringOfNumbers(10)
+        const objectKey = "test.txt"
+        fs.writeFileSync(objectKey, "test")
+
+        beforeAll(async () => {
+            // login as coordinator
+            await signInWithEmailAndPassword(userAuth, users[1].data.email, passwords[1])
+            // create the buckets and upload the file
+            await createS3Bucket(userFunctions, sourceBucketName)
+            await createS3Bucket(userFunctions, destinationBucketName)
+            await uploadFileToS3(
+                sourceBucketName,
+                objectKey,
+                objectKey
+            )
+        })
+
+        it("should successfully transfer an object between buckets", async () => {
+            const result = await transferObject(
+                userFunctions,
+                sourceBucketName,
+                objectKey,
+                destinationBucketName,
+                objectKey
+            )
+
+            expect(result).to.be.true
+        })
+
+        it("should transfer an object between buckets in different regions", async () => {})
+        it("should throw when trying to transfer an object that does not exist", async () => {
+            await expect(transferObject(
+                userFunctions,
+                sourceBucketName,
+                "i-dont-exist.txt",
+                destinationBucketName,
+                objectKey
+            )).to.be.rejected
+        })
+
+        afterAll(async () => {
+            // delete the buckets
+            await deleteObjectFromS3(sourceBucketName, objectKey)
+            await deleteObjectFromS3(destinationBucketName, objectKey)
+            await deleteBucket(sourceBucketName)
+            await deleteBucket(destinationBucketName)
+
+            fs.unlinkSync(objectKey)
+        })
+    })
+
     // @todo this is not used in the cli yet
     describe("uploadFileToStorage", () => {
         it("should successfully upload a file to storage", async () => {})
diff --git a/packages/actions/test/utils/storage.ts b/packages/actions/test/utils/storage.ts
index 235b1cc4..ee5e88fa 100644
--- a/packages/actions/test/utils/storage.ts
+++ b/packages/actions/test/utils/storage.ts
@@ -106,7 +106,8 @@ export const uploadFileToS3 = async (bucketName: string, objectKey: string, path
     const params = {
         Bucket: bucketName,
         Key: objectKey,
-        Body: fs.createReadStream(path)
+        Body: fs.createReadStream(path),
+        ACL: "public-read"
     }
 
     const command = new PutObjectCommand(params)
diff --git a/packages/backend/src/functions/index.ts b/packages/backend/src/functions/index.ts
index 993a1d22..98ac27d4 100644
--- a/packages/backend/src/functions/index.ts
+++ b/packages/backend/src/functions/index.ts
@@ -29,7 +29,8 @@ export {
     generateGetObjectPreSignedUrl,
     startMultiPartUpload,
     generatePreSignedUrlsParts,
-    completeMultiPartUpload
+    completeMultiPartUpload,
+    transferObject
 } from "./storage"
 export { checkAndRemoveBlockingContributor, resumeContributionAfterTimeoutExpiration } from "./timeout"
 
diff --git a/packages/backend/src/functions/storage.ts b/packages/backend/src/functions/storage.ts
index b78be507..0b0b29ea 100644
--- a/packages/backend/src/functions/storage.ts
+++ b/packages/backend/src/functions/storage.ts
@@ -1,6 +1,8 @@
 import * as functions from "firebase-functions"
 import admin from "firebase-admin"
 import {
+    S3Client,
+    CopyObjectCommand,
     GetObjectCommand,
     CreateMultipartUploadCommand,
     UploadPartCommand,
@@ -30,7 +32,8 @@ import {
     CompleteMultiPartUploadData,
     CreateBucketData,
     GeneratePreSignedUrlsPartsData,
-    StartMultiPartUploadData
+    StartMultiPartUploadData,
+    TransferObjectData
 } from "../types/index"
 
 dotenv.config()
@@ -228,6 +231,59 @@ export const createBucket = functions
         }
     })
 
+/**
+ * Transfer a public object from one bucket to another.
+ * @returns <Promise<boolean>> - true if the operation was successful; otherwise false.
+ */
+export const transferObject = functions
+    .runWith({
+        memory: "512MB"
+    })
+    .https.onCall(async (data: TransferObjectData, context: functions.https.CallableContext): Promise<boolean> => {
+        // Check if the user has the coordinator claim.
+        if (!context.auth || !context.auth.token.coordinator) logAndThrowError(COMMON_ERRORS.CM_NOT_COORDINATOR_ROLE)
+
+        if (
+            !data.sourceBucketName ||
+            !data.sourceObjectKey ||
+            !data.destinationBucketName ||
+            !data.destinationObjectKey ||
+            !data.sourceRegion
+        ) logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA)
+
+        // Connect to S3 client.
+        const S3 = await getS3Client()
+
+        const copyParams = {
+            Bucket: data.destinationBucketName,
+            CopySource: `${data.sourceBucketName}/${encodeURIComponent(data.sourceObjectKey)}`,
+            Key: data.destinationObjectKey,
+        }
+
+        const command = new CopyObjectCommand(copyParams)
+
+        try {
+            // Execute S3 command.
+            await S3.send(command)
+
+            printLog(
+                `The object was copied from ${data.sourceBucketName} to ${data.destinationBucketName}`,
+                LogLevel.LOG
+            )
+
+            return true
+        } catch (error: any) {
+            // eslint-disable-next-line @typescript-eslint/no-shadow
+            if (error.$metadata.httpStatusCode === 403) logAndThrowError(SPECIFIC_ERRORS.SE_STORAGE_MISSING_PERMISSIONS)
+
+            // @todo handle more specific errors here.
+
+            // nb. do not handle common errors! This method must return false if not found!
+        }
+
+        return false
+    })
+
 /**
  * Check if a specified object exist in a given AWS S3 bucket.
  * @returns <Promise<boolean>> - true if the object exist in the given bucket; otherwise false.
diff --git a/packages/backend/src/types/index.ts b/packages/backend/src/types/index.ts
index 130e87f8..4b2bb5f1 100644
--- a/packages/backend/src/types/index.ts
+++ b/packages/backend/src/types/index.ts
@@ -33,6 +33,23 @@ export type BucketAndObjectKeyData = {
     objectKey: string
 }
 
+/**
+ * Group all the necessary data needed for running the `transferObject` cloud function.
+ * @typedef {Object} TransferObjectData
+ * @property {string} sourceRegion - the region of the source bucket.
+ * @property {string} sourceBucketName - the name of the source bucket.
+ * @property {string} sourceObjectKey - the unique key to identify the object inside the given AWS S3 source bucket.
+ * @property {string} destinationBucketName - the name of the destination bucket.
+ * @property {string} destinationObjectKey - the unique key to identify the object inside the given AWS S3 destination bucket.
+ */
+export type TransferObjectData = {
+    sourceRegion: string
+    sourceBucketName: string
+    sourceObjectKey: string
+    destinationBucketName: string
+    destinationObjectKey: string
+}
+
 /**
  * Group all the necessary data needed for running the `startMultiPartUpload` cloud function.
  * @typedef {Object} StartMultiPartUploadData
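
A minimal usage sketch for the new transferObject helper follows. It is a sketch under stated assumptions, not part of the patch: the actions package is imported here as "@p0tion/actions" (the import path is an assumption), the caller is assumed to be signed in as a coordinator (the cloud function rejects non-coordinators), and the bucket names and object keys are hypothetical placeholders.

    import { initializeApp } from "firebase/app"
    import { getFunctions } from "firebase/functions"
    import { transferObject } from "@p0tion/actions"

    // Initialize the Firebase app and get a Functions instance (project config omitted here).
    const app = initializeApp({ /* Firebase project config */ })
    const firebaseFunctions = getFunctions(app)

    const runTransfer = async () => {
        // Copy an artifact from the origin bucket to the destination bucket.
        // Bucket names and object keys below are placeholders for illustration only.
        const transferred = await transferObject(
            firebaseFunctions,
            "example-origin-bucket",          // origin bucket name
            "circuits/example/circuit.r1cs",  // origin object key
            "example-destination-bucket",     // destination bucket name
            "circuits/example/circuit.r1cs"   // destination object key
        )

        if (!transferred) console.error("Object transfer failed")
    }

    runTransfer().catch(console.error)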