Skip to content

Commit

Permalink
feat(setup): add cloud function for file transfer and unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ctrlc03 committed Jul 18, 2023
1 parent d032f37 commit f7b059d
Show file tree
Hide file tree
Showing 11 changed files with 173 additions and 9 deletions.
3 changes: 2 additions & 1 deletion packages/actions/src/helpers/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -306,6 +306,7 @@ export const commonTerms = {
checkAndPrepareCoordinatorForFinalization: "checkAndPrepareCoordinatorForFinalization",
finalizeCircuit: "finalizeCircuit",
finalizeCeremony: "finalizeCeremony",
downloadCircuitArtifacts: "downloadCircuitArtifacts"
downloadCircuitArtifacts: "downloadCircuitArtifacts",
transferObject: "transferObject",
}
}
28 changes: 28 additions & 0 deletions packages/actions/src/helpers/functions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -436,3 +436,31 @@ export const finalizeCeremony = async (functions: Functions, ceremonyId: string)
ceremonyId
})
}

/**
 * Transfer an object between two buckets by invoking the `transferObject` cloud function.
 * @dev the payload keys must match the backend's `TransferObjectData` contract
 * (`sourceRegion`, `sourceBucketName`, `sourceObjectKey`, `destinationBucketName`,
 * `destinationObjectKey`) — the cloud function rejects the call with a
 * missing-input error otherwise.
 * @param functions <Functions> - the Firebase cloud functions object instance.
 * @param originBucketName <string> - the name of the origin (source) bucket.
 * @param originObjectKey <string> - the key of the origin object.
 * @param destinationBucketName <string> - the name of the destination bucket.
 * @param destinationObjectKey <string> - the key of the destination object.
 * @param sourceRegion <string> - the AWS region of the origin bucket (defaults to "us-east-1").
 * @returns <Promise<boolean>> - true when the transfer is completed; otherwise false.
 */
export const transferObject = async (
    functions: Functions,
    originBucketName: string,
    originObjectKey: string,
    destinationBucketName: string,
    destinationObjectKey: string,
    sourceRegion: string = "us-east-1"
): Promise<boolean> => {
    const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.transferObject)

    // Map the helper's parameters onto the field names the backend validates.
    const { data: result } = await cf({
        sourceRegion,
        sourceBucketName: originBucketName,
        sourceObjectKey: originObjectKey,
        destinationBucketName,
        destinationObjectKey
    })

    return result as boolean
}
4 changes: 3 additions & 1 deletion packages/actions/src/helpers/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,9 @@ export const parseCeremonyFile = async (path: string, cleanup: boolean = false):
const localR1csPath = `./${circuitData.name}.r1cs`

// check that the artifacts exist in S3
const s3 = new S3Client({region: 'us-east-1'})
// we don't need any privileges to download this
// just the correct region
const s3 = new S3Client({region: artifacts.region})

try {
await s3.send(new HeadObjectCommand({
Expand Down
3 changes: 2 additions & 1 deletion packages/actions/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ export {
verifyContribution,
checkAndPrepareCoordinatorForFinalization,
finalizeCircuit,
finalizeCeremony
finalizeCeremony,
transferObject
} from "./helpers/functions"
export { toHex, blake512FromPath, computeSHA256ToHex, compareHashes } from "./helpers/crypto"
export {
Expand Down
1 change: 1 addition & 0 deletions packages/actions/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -624,6 +624,7 @@ export type SetupCeremonyData = {
export type CeremonySetupTemplateCircuitArtifacts = {
artifacts: {
bucket: string
region: string
r1csStoragePath: string
wasmStoragePath: string
}
Expand Down
2 changes: 1 addition & 1 deletion packages/actions/test/data/artifacts/ceremonySetup.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"title": "Test dev ceremony",
"title": "Test dev 2 ceremony",
"description": "This is an example ceremony",
"startDate": "2023-08-07T00:00:00",
"endDate": "2023-09-10T00:00:00",
Expand Down
60 changes: 58 additions & 2 deletions packages/actions/test/unit/storage.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@ import {
cleanUpMockUsers,
sleep,
cleanUpRecursively,
mockCeremoniesCleanup
mockCeremoniesCleanup,
generatePseudoRandomStringOfNumbers,
uploadFileToS3
} from "../utils/index"
import { fakeCeremoniesData, fakeCircuitsData, fakeUsersData } from "../data/samples"
import {
Expand All @@ -39,7 +41,7 @@ import {
import { TestingEnvironment } from "../../src/types/enums"
import { ChunkWithUrl, ETagWithPartNumber } from "../../src/types/index"
import { getChunksAndPreSignedUrls, getWasmStorageFilePath, uploadParts } from "../../src/helpers/storage"
import { completeMultiPartUpload, openMultiPartUpload } from "../../src/helpers/functions"
import { completeMultiPartUpload, openMultiPartUpload, transferObject } from "../../src/helpers/functions"

chai.use(chaiAsPromised)

Expand Down Expand Up @@ -684,6 +686,60 @@ describe("Storage", () => {
})
})

describe("transferObject", () => {
    // We need two buckets - a source and a destination - plus a small local fixture file.
    const sourceBucketName = generatePseudoRandomStringOfNumbers(10)
    const destinationBucketName = generatePseudoRandomStringOfNumbers(10)
    const objectKey = "test.txt"

    beforeAll(async () => {
        // Create the local fixture here (not at describe-evaluation time) so the
        // file only exists while this suite actually runs.
        fs.writeFileSync(objectKey, "test")
        // login as coordinator
        await signInWithEmailAndPassword(userAuth, users[1].data.email, passwords[1])
        // create the buckets and upload the file
        await createS3Bucket(userFunctions, sourceBucketName)
        await createS3Bucket(userFunctions, destinationBucketName)
        await uploadFileToS3(sourceBucketName, objectKey, objectKey)
    })

    it("should successfully transfer an object between buckets", async () => {
        const result = await transferObject(
            userFunctions,
            sourceBucketName,
            objectKey,
            destinationBucketName,
            objectKey
        )

        expect(result).to.be.true
    })

    // @todo implement once a second-region bucket can be provisioned in the test environment.
    it("should transfer an object between buckets in different regions", async () => {})

    it("should throw when trying to transfer an object that does not exist", async () => {
        // NOTE(review): the backend is documented to return `false` (not throw) when the
        // source object is missing — confirm whether this should assert a `false` result
        // instead of a rejection.
        await expect(
            transferObject(userFunctions, sourceBucketName, "i-dont-exist.txt", destinationBucketName, objectKey)
        ).to.be.rejected
    })

    afterAll(async () => {
        // delete the uploaded objects and the buckets
        await deleteObjectFromS3(sourceBucketName, objectKey)
        await deleteObjectFromS3(destinationBucketName, objectKey)
        await deleteBucket(sourceBucketName)
        await deleteBucket(destinationBucketName)

        // remove the local fixture file
        fs.unlinkSync(objectKey)
    })
})

// @todo this is not used in the cli yet
describe("uploadFileToStorage", () => {
it("should successfully upload a file to storage", async () => {})
Expand Down
3 changes: 2 additions & 1 deletion packages/actions/test/utils/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,8 @@ export const uploadFileToS3 = async (bucketName: string, objectKey: string, path
const params = {
Bucket: bucketName,
Key: objectKey,
Body: fs.createReadStream(path)
Body: fs.createReadStream(path),
ACL: "public-read"
}

const command = new PutObjectCommand(params)
Expand Down
3 changes: 2 additions & 1 deletion packages/backend/src/functions/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,8 @@ export {
generateGetObjectPreSignedUrl,
startMultiPartUpload,
generatePreSignedUrlsParts,
completeMultiPartUpload
completeMultiPartUpload,
transferObject
} from "./storage"
export { checkAndRemoveBlockingContributor, resumeContributionAfterTimeoutExpiration } from "./timeout"

Expand Down
58 changes: 57 additions & 1 deletion packages/backend/src/functions/storage.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import * as functions from "firebase-functions"
import admin from "firebase-admin"
import {
S3Client,
CopyObjectCommand,
GetObjectCommand,
CreateMultipartUploadCommand,
UploadPartCommand,
Expand Down Expand Up @@ -30,7 +32,8 @@ import {
CompleteMultiPartUploadData,
CreateBucketData,
GeneratePreSignedUrlsPartsData,
StartMultiPartUploadData
StartMultiPartUploadData,
TransferObjectData
} from "../types/index"

dotenv.config()
Expand Down Expand Up @@ -228,6 +231,59 @@ export const createBucket = functions
}
})

/**
 * Transfer a public object from one bucket to another.
 * @dev only callable by an authenticated coordinator. The copy is performed
 * server-side by S3 (`CopyObjectCommand`), so the object bytes never transit
 * through this cloud function.
 * @returns <Promise<boolean>> - true if the operation was successful; otherwise false.
 */
export const transferObject = functions
    .runWith({
        memory: "512MB"
    })
    .https.onCall(async (data: TransferObjectData, context: functions.https.CallableContext): Promise<boolean> => {
        // Check if the user has the coordinator claim.
        if (!context.auth || !context.auth.token.coordinator) logAndThrowError(COMMON_ERRORS.CM_NOT_COORDINATOR_ROLE)

        // Validate that every required field of `TransferObjectData` was provided.
        if (
            !data.sourceBucketName ||
            !data.sourceObjectKey ||
            !data.destinationBucketName ||
            !data.destinationObjectKey ||
            !data.sourceRegion
        )
            logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA)

        // Connect to S3 client.
        // NOTE(review): `data.sourceRegion` is validated above but never used —
        // presumably `getS3Client()` should be configured with it so cross-region
        // copies work; confirm against the client configuration.
        const S3 = await getS3Client()

        const copyParams = {
            Bucket: data.destinationBucketName,
            // The AWS S3 CopyObject API requires the CopySource value to be URL-encoded.
            CopySource: `${data.sourceBucketName}/${encodeURIComponent(data.sourceObjectKey)}`,
            Key: data.destinationObjectKey
        }

        const command = new CopyObjectCommand(copyParams)

        try {
            // Execute S3 command.
            await S3.send(command)

            printLog(
                `The object was copied from ${data.sourceBucketName} to ${data.destinationBucketName}`,
                LogLevel.LOG
            )

            return true
        } catch (error: any) {
            // eslint-disable-next-line @typescript-eslint/no-shadow
            // Use optional chaining: not every thrown error carries `$metadata`
            // (e.g. network failures), and a direct access would itself throw,
            // breaking the "return false" contract below.
            if (error?.$metadata?.httpStatusCode === 403) logAndThrowError(SPECIFIC_ERRORS.SE_STORAGE_MISSING_PERMISSIONS)

            // @todo handle more specific errors here.

            // nb. do not handle common errors! This method must return false if not found!
        }

        return false
    })

/**
* Check if a specified object exist in a given AWS S3 bucket.
* @returns <Promise<boolean>> - true if the object exist in the given bucket; otherwise false.
Expand Down
17 changes: 17 additions & 0 deletions packages/backend/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,23 @@ export type BucketAndObjectKeyData = {
objectKey: string
}

/**
 * Payload accepted by the `transferObject` cloud function.
 * @typedef {Object} TransferObjectData
 * @property {string} sourceRegion - the AWS region hosting the source bucket.
 * @property {string} sourceBucketName - the name of the source bucket.
 * @property {string} sourceObjectKey - the key identifying the object inside the AWS S3 source bucket.
 * @property {string} destinationBucketName - the name of the destination bucket.
 * @property {string} destinationObjectKey - the key identifying the object inside the AWS S3 destination bucket.
 */
export type TransferObjectData = {
    sourceRegion: string
    sourceBucketName: string
    sourceObjectKey: string
    destinationBucketName: string
    destinationObjectKey: string
}

/**
* Group all the necessary data needed for running the `startMultiPartUpload` cloud function.
* @typedef {Object} StartMultiPartUploadData
Expand Down

0 comments on commit f7b059d

Please sign in to comment.