diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ccb61c7a..05d75411 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -41,6 +41,14 @@ jobs: - name: Install run: npm install + - name: Link (npm < 7) + if: matrix.node-version == '14.x' + run: | + cd plugins/s3 + npm link + cd ../../ + npm link @aws-lite/s3 + - name: Test run: npm test env: diff --git a/package.json b/package.json index 100f753a..b99d3f68 100644 --- a/package.json +++ b/package.json @@ -9,8 +9,8 @@ }, "bugs": "https://github.com/architect/aws-lite/issues", "scripts": { - "generate": "npm run generate-plugins", - "generate-plugins": "node scripts/generate-plugins", + "gen": "npm run generate-plugins", + "generate-plugins": "node scripts/generate-plugins/index.mjs", "publish-plugins": "node scripts/publish-plugins", "lint": "eslint --fix .", "test": "npm run lint && npm run coverage", @@ -30,22 +30,23 @@ "ini": "^4.1.1" }, "devDependencies": { - "@architect/eslint-config": "^2.1.1", + "@architect/eslint-config": "^2.1.2", "@aws-sdk/client-ssm": "^3.405.0", - "@aws-sdk/util-dynamodb": "^3.415.0", + "@aws-sdk/util-dynamodb": "^3.423.0", "adm-zip": "^0.5.10", "cross-env": "^7.0.3", - "eslint": "^8.48.0", + "eslint": "^8.50.0", "mock-fs": "^5.2.0", "nyc": "^15.1.0", "tap-spec": "^5.0.0", - "tape": "^5.6.6" + "tape": "^5.7.0" }, "files": [ "src" ], "workspaces": [ - "plugins/dynamodb" + "plugins/dynamodb", + "plugins/s3" ], "eslintConfig": { "extends": "@architect/eslint-config" diff --git a/plugins/dynamodb/readme.md b/plugins/dynamodb/readme.md index 15e31b04..78df97f3 100644 --- a/plugins/dynamodb/readme.md +++ b/plugins/dynamodb/readme.md @@ -12,6 +12,13 @@ npm i @aws-lite/dynamodb ``` +## Docs + + + + + + ## Usage This plugin covers all DynamoDB methods (listed & linked below), utilizing DynamoDB's semantics. 
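To make the DynamoDB usage above concrete, here is a brief sketch (the table name mirrors the example in the main `aws-lite` readme, and the installed `@aws-lite/dynamodb` plugin is assumed to be picked up by the client):

```js
import awsLite from '@aws-lite/client'
const aws = await awsLite()

// Items and keys are passed as plain objects; the plugin (de)serializes AWS-flavored JSON
await aws.dynamodb.PutItem({ TableName: 'my-table', Item: { id: 'hello', ok: true } })

// Responses are unmarshalled back to plain objects
const { Item } = await aws.dynamodb.GetItem({ TableName: 'my-table', Key: { id: 'hello' } })
// Item: { id: 'hello', ok: true }
```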
diff --git a/plugins/dynamodb/src/index.mjs b/plugins/dynamodb/src/index.mjs index 03582cf2..c3391c0d 100644 --- a/plugins/dynamodb/src/index.mjs +++ b/plugins/dynamodb/src/index.mjs @@ -1,5 +1,6 @@ const service = 'dynamodb' const required = true +const docRoot = 'https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/' // Common params to be AWS-flavored JSON-encoded const awsjsonReq = [ 'ExclusiveStartKey', 'ExpressionAttributeValues', 'Item', 'Key', ] @@ -20,7 +21,8 @@ const Item = { ...obj, required } const ReturnConsumedCapacity = str const ReturnItemCollectionMetrics = str -const unmarshall = keys => async response => ({ awsjson: keys, response }) +const defaultResponse = ({ payload }) => payload +const unmarshall = keys => ({ payload }) => ({ awsjson: keys, ...payload }) const headers = (method, additional) => ({ 'X-Amz-Target': `DynamoDB_20120810.${method}`, ...additional }) const awsjsonContentType = { 'content-type': 'application/x-amz-json-1.0' } @@ -28,8 +30,8 @@ const awsjsonContentType = { 'content-type': 'application/x-amz-json-1.0' } * Plugin maintained by: @architect */ -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchExecuteStatement.html const BatchExecuteStatement = { + awsDoc: docRoot + 'API_BatchExecuteStatement.html', validate: { Statements: { ...arr, required }, ReturnConsumedCapacity, @@ -46,20 +48,20 @@ const BatchExecuteStatement = { payload: { ...params, Statements } } }, - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Responses?.length) { - response.Responses = response.Responses.map(r => { + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Responses?.length) { + payload.Responses = payload.Responses.map(r => { if (r?.Error?.Item) r.Error.Item = awsjsonUnmarshall(r.Error.Item) if (r?.Item) r.Item = awsjsonUnmarshall(r.Item) return r }) } - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html const BatchGetItem = { + awsDoc: docRoot + 'API_BatchGetItem.html', validate: { RequestItems: { ...obj, required }, ReturnConsumedCapacity, @@ -76,24 +78,24 @@ const BatchGetItem = { payload: { ...params, RequestItems } } }, - response: async (response, { awsjsonUnmarshall }) => { - let Responses = Object.keys(response.Responses) + response: async ({ payload }, { awsjsonUnmarshall }) => { + let Responses = Object.keys(payload.Responses) if (Responses.length) { - Responses.forEach(i => response.Responses[i] = response.Responses[i]?.map(awsjsonUnmarshall)) + Responses.forEach(i => payload.Responses[i] = payload.Responses[i]?.map(awsjsonUnmarshall)) } - let UnprocessedKeys = Object.keys(response.UnprocessedKeys) + let UnprocessedKeys = Object.keys(payload.UnprocessedKeys) if (UnprocessedKeys.length) { - UnprocessedKeys.forEach(i => response.UnprocessedKeys[i] = { - ...response.UnprocessedKeys[i], - Keys: response.UnprocessedKeys[i]?.Keys?.map(awsjsonUnmarshall) + UnprocessedKeys.forEach(i => payload.UnprocessedKeys[i] = { + ...payload.UnprocessedKeys[i], + Keys: payload.UnprocessedKeys[i]?.Keys?.map(awsjsonUnmarshall) }) } - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html const BatchWriteItem = { + awsDoc: docRoot + 'API_BatchWriteItem.html', validate: { RequestItems: { ...obj, required }, ReturnConsumedCapacity, @@ -121,9 +123,9 @@ const BatchWriteItem = { payload: { ...params, RequestItems } } }, - response: async (response, { 
awsjsonUnmarshall }) => { + response: async ({ payload }, { awsjsonUnmarshall }) => { let UnprocessedItems = {} - Object.entries(response.UnprocessedItems).forEach(([ table, items ]) => { + Object.entries(payload.UnprocessedItems).forEach(([ table, items ]) => { UnprocessedItems[table] = items.map(i => { let request = {} Object.entries(i).forEach(([ op, data ]) => { @@ -137,12 +139,12 @@ const BatchWriteItem = { return request }) }) - return { response: { ...response, UnprocessedItems } } + return { ...payload, UnprocessedItems } } } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_CreateBackup.html const CreateBackup = { + awsDoc: docRoot + 'API_CreateBackup.html', validate: { TableName, BackupName: { ...str, required }, @@ -151,10 +153,11 @@ const CreateBackup = { headers: headers('CreateBackup'), // Undocumented as of author time payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_CreateGlobalTable.html const CreateGlobalTable = { + awsDoc: docRoot + 'API_CreateGlobalTable.html', validate: { GlobalTableName: TableName, ReplicationGroup: { ...arr, required }, @@ -163,10 +166,11 @@ const CreateGlobalTable = { headers: headers('CreateGlobalTable'), // Undocumented as of author time payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_CreateTable.html const CreateTable = { + awsDoc: docRoot + 'API_CreateTable.html', validate: { TableName, AttributeDefinitions: { ...arr, required }, @@ -185,10 +189,11 @@ const CreateTable = { headers: headers('CreateTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteBackup.html const DeleteBackup = { + awsDoc: docRoot + 'API_DeleteBackup.html', validate: { BackupArn: { ...str, required }, }, @@ -196,10 +201,11 @@ const DeleteBackup = { headers: headers('DeleteBackup'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteItem.html const DeleteItem = { + awsDoc: docRoot + 'API_DeleteItem.html', validate: { TableName, Key, @@ -218,14 +224,14 @@ const DeleteItem = { headers: headers('DeleteItem'), payload: params, }), - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Attributes) response.Attributes = awsjsonUnmarshall(response.Attributes) - return { response } + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Attributes) payload.Attributes = awsjsonUnmarshall(payload.Attributes) + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteTable.html const DeleteTable = { + awsDoc: docRoot + 'API_DeleteTable.html', validate: { TableName, }, @@ -233,10 +239,11 @@ const DeleteTable = { headers: headers('DeleteTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeBackup.html const DescribeBackup = { + awsDoc: docRoot + 'API_DescribeBackup.html', validate: { BackupArn: { ...str, required }, }, @@ -244,10 +251,11 @@ const DescribeBackup = { headers: headers('DescribeBackup'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeContinuousBackups.html const DescribeContinuousBackups = { + awsDoc: docRoot + 'API_DescribeContinuousBackups.html', validate: { TableName, }, @@ -255,10 +263,11 @@ const 
DescribeContinuousBackups = { headers: headers('DescribeContinuousBackups'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeContributorInsights.html const DescribeContributorInsights = { + awsDoc: docRoot + 'API_DescribeContributorInsights.html', validate: { TableName, IndexName: str, @@ -267,17 +276,19 @@ const DescribeContributorInsights = { headers: headers('DescribeContributorInsights'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeEndpoints.html const DescribeEndpoints = { + awsDoc: docRoot + 'API_DescribeEndpoints.html', request: async () => ({ headers: headers('DescribeEndpoints'), }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeExport.html const DescribeExport = { + awsDoc: docRoot + 'API_DescribeExport.html', validate: { ExportArn: { ...str, required }, }, @@ -285,10 +296,11 @@ const DescribeExport = { headers: headers('DescribeExport'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeGlobalTable.html const DescribeGlobalTable = { + awsDoc: docRoot + 'API_DescribeGlobalTable.html', validate: { GlobalTableName: { ...str, required }, }, @@ -296,10 +308,11 @@ const DescribeGlobalTable = { headers: headers('DescribeGlobalTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeGlobalTableSettings.html const DescribeGlobalTableSettings = { + awsDoc: docRoot + 'API_DescribeGlobalTableSettings.html', validate: { GlobalTableName: { ...str, required }, }, @@ -307,10 +320,11 @@ const DescribeGlobalTableSettings = { headers: headers('DescribeGlobalTableSettings'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeImport.html const DescribeImport = { + awsDoc: docRoot + 'API_DescribeImport.html', validate: { ImportArn: { ...str, required }, }, @@ -318,10 +332,11 @@ const DescribeImport = { headers: headers('DescribeImport'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeKinesisStreamingDestination.html const DescribeKinesisStreamingDestination = { + awsDoc: docRoot + 'API_DescribeKinesisStreamingDestination.html', validate: { TableName, }, @@ -329,17 +344,19 @@ const DescribeKinesisStreamingDestination = { headers: headers('DescribeKinesisStreamingDestination'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeLimits.html const DescribeLimits = { + awsDoc: docRoot + 'API_DescribeLimits.html', request: async () => ({ headers: headers('DescribeLimits'), }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeTable.html const DescribeTable = { + awsDoc: docRoot + 'API_DescribeTable.html', validate: { TableName, }, @@ -347,10 +364,11 @@ const DescribeTable = { headers: headers('DescribeTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeTableReplicaAutoScaling.html const DescribeTableReplicaAutoScaling = { + awsDoc: docRoot + 'API_DescribeTableReplicaAutoScaling.html', validate: { TableName, }, @@ -358,10 +376,11 @@ 
const DescribeTableReplicaAutoScaling = { headers: headers('DescribeTableReplicaAutoScaling'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeTimeToLive.html const DescribeTimeToLive = { + awsDoc: docRoot + 'API_DescribeTimeToLive.html', validate: { TableName, }, @@ -369,10 +388,11 @@ const DescribeTimeToLive = { headers: headers('DescribeTimeToLive'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DisableKinesisStreamingDestination.html const DisableKinesisStreamingDestination = { + awsDoc: docRoot + 'API_DisableKinesisStreamingDestination.html', validate: { TableName, StreamArn: { ...str, required }, @@ -381,10 +401,11 @@ const DisableKinesisStreamingDestination = { headers: headers('DisableKinesisStreamingDestination'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_EnableKinesisStreamingDestination.html const EnableKinesisStreamingDestination = { + awsDoc: docRoot + 'API_EnableKinesisStreamingDestination.html', validate: { TableName, StreamArn: { ...str, required }, @@ -393,10 +414,11 @@ const EnableKinesisStreamingDestination = { headers: headers('EnableKinesisStreamingDestination'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ExecuteStatement.html const ExecuteStatement = { + awsDoc: docRoot + 'API_ExecuteStatement.html', validate: { TableName, Statement: { ...str, required }, @@ -414,16 +436,17 @@ const ExecuteStatement = { payload: params, } }, - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Items?.length) { - response.Items = response.Items.map(awsjsonUnmarshall) + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Items?.length) { + payload.Items = payload.Items.map(awsjsonUnmarshall) } - return { awsjson: [ 'LastEvaluatedKey' ], response } + payload.awsjson = [ 'LastEvaluatedKey' ] + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ExecuteTransaction.html const ExecuteTransaction = { + awsDoc: docRoot + 'API_ExecuteTransaction.html', validate: { TableName, TransactStatements: { ...arr, required }, @@ -442,19 +465,19 @@ const ExecuteTransaction = { payload: params, } }, - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Responses?.length) { - response.Responses = response.Responses.map(i => { + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Responses?.length) { + payload.Responses = payload.Responses.map(i => { i.Item = awsjsonUnmarshall(i.Item) return i }) } - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ExportTableToPointInTime.html const ExportTableToPointInTime = { + awsDoc: docRoot + 'API_ExportTableToPointInTime.html', validate: { S3Bucket: { ...str, required }, TableArn: { ...str, required }, @@ -470,10 +493,11 @@ const ExportTableToPointInTime = { headers: headers('ExportTableToPointInTime'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_GetItem.html const GetItem = { + awsDoc: docRoot + 'API_GetItem.html', validate: { TableName, Key, @@ -491,8 +515,8 @@ const GetItem = { response: unmarshall(awsjsonRes), } -// 
https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ImportTable.html const ImportTable = { + awsDoc: docRoot + 'API_ImportTable.html', validate: { InputFormat: { ...str, required }, S3BucketSource: { ...obj, required }, @@ -505,10 +529,11 @@ const ImportTable = { headers: headers('ImportTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListBackups.html const ListBackups = { + awsDoc: docRoot + 'API_ListBackups.html', validate: { BackupType: str, ExclusiveStartBackupArn: str, @@ -521,10 +546,11 @@ const ListBackups = { headers: headers('ListBackups'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListContributorInsights.html const ListContributorInsights = { + awsDoc: docRoot + 'API_ListContributorInsights.html', validate: { MaxResults: num, NextToken: str, @@ -534,10 +560,11 @@ const ListContributorInsights = { headers: headers('ListContributorInsights'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListExports.html const ListExports = { + awsDoc: docRoot + 'API_ListExports.html', validate: { MaxResults: num, NextToken: str, @@ -547,10 +574,11 @@ const ListExports = { headers: headers('ListExports'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListGlobalTables.html const ListGlobalTables = { + awsDoc: docRoot + 'API_ListGlobalTables.html', validate: { ExclusiveStartGlobalTableName: str, Limit: num, @@ -560,10 +588,11 @@ const ListGlobalTables = { headers: headers('ListGlobalTables'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListImports.html const ListImports = { + awsDoc: docRoot + 'API_ListImports.html', validate: { NextToken: str, PageSize: num, @@ -573,10 +602,11 @@ const ListImports = { headers: headers('ListImports'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListTables.html const ListTables = { + awsDoc: docRoot + 'API_ListTables.html', validate: { ExclusiveStartTableName: str, Limit: num, @@ -585,10 +615,11 @@ const ListTables = { headers: headers('ListTables'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListTagsOfResource.html const ListTagsOfResource = { + awsDoc: docRoot + 'API_ListTagsOfResource.html', validate: { NextToken: str, ResourceArn: { ...str, required }, @@ -597,10 +628,11 @@ const ListTagsOfResource = { headers: headers('ListTagsOfResource'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html const PutItem = { + awsDoc: docRoot + 'API_PutItem.html', validate: { TableName, Item, @@ -622,8 +654,8 @@ const PutItem = { response: unmarshall([ 'Attributes', ]), } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Query.html const Query = { + awsDoc: docRoot + 'API_Query.html', validate: { TableName, AttributesToGet: arr, @@ -648,18 +680,18 @@ const Query = { headers: headers('Query'), payload: params, }), - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Items?.length) response.Items = response.Items.map(awsjsonUnmarshall) - if (response?.LastEvaluatedKey) { - let key = 
response.LastEvaluatedKey[Object.keys(response.LastEvaluatedKey)[0]] - response.LastEvaluatedKey = awsjsonUnmarshall(key) + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Items?.length) payload.Items = payload.Items.map(awsjsonUnmarshall) + if (payload?.LastEvaluatedKey) { + let key = payload.LastEvaluatedKey[Object.keys(payload.LastEvaluatedKey)[0]] + payload.LastEvaluatedKey = awsjsonUnmarshall(key) } - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_RestoreTableFromBackup.html const RestoreTableFromBackup = { + awsDoc: docRoot + 'API_RestoreTableFromBackup.html', validate: { BackupArn: { ...str, required }, TargetTableName: { ...str, required }, @@ -673,10 +705,11 @@ const RestoreTableFromBackup = { headers: headers('RestoreTableFromBackup'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_RestoreTableToPointInTime.html const RestoreTableToPointInTime = { + awsDoc: docRoot + 'API_RestoreTableToPointInTime.html', validate: { TargetTableName: { ...str, required }, BillingModeOverride: str, @@ -693,10 +726,11 @@ const RestoreTableToPointInTime = { headers: headers('RestoreTableToPointInTime'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Scan.html const Scan = { + awsDoc: docRoot + 'API_Scan.html', validate: { TableName, AttributesToGet: arr, @@ -719,14 +753,14 @@ const Scan = { headers: headers('Scan'), payload: params, }), - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Items?.length) response.Items = response.Items.map(awsjsonUnmarshall) - return { response } + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Items?.length) payload.Items = payload.Items.map(awsjsonUnmarshall) + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TagResource.html const TagResource = { + awsDoc: docRoot + 'API_TagResource.html', validate: { ResourceArn: { ...str, required }, Tags: { ...arr, required }, @@ -735,10 +769,11 @@ const TagResource = { headers: headers('TagResource'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TransactGetItems.html const TransactGetItems = { + awsDoc: docRoot + 'API_TransactGetItems.html', validate: { TransactItems: arr, ReturnConsumedCapacity: str, @@ -754,17 +789,17 @@ const TransactGetItems = { payload: params, } }, - response: async (response, { awsjsonUnmarshall }) => { - if (response?.Responses?.length) response.Responses = response.Responses.map(i => { + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (payload?.Responses?.length) payload.Responses = payload.Responses.map(i => { i.Item = awsjsonUnmarshall(i.Item) return i }) - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TransactWriteItems.html const TransactWriteItems = { + awsDoc: docRoot + 'API_TransactWriteItems.html', validate: { TransactItems: arr, ClientRequestToken: str, @@ -814,20 +849,20 @@ const TransactWriteItems = { payload: params, } }, - response: async (response, { awsjsonUnmarshall }) => { - if (Object.keys(response?.ItemCollectionMetrics || {})?.length) { - Object.entries(response.ItemCollectionMetrics).forEach(([ table, items ]) => { - response.ItemCollectionMetrics[table] = items.map(i => { + response: async ({ 
payload }, { awsjsonUnmarshall }) => { + if (Object.keys(payload?.ItemCollectionMetrics || {})?.length) { + Object.entries(payload.ItemCollectionMetrics).forEach(([ table, items ]) => { + payload.ItemCollectionMetrics[table] = items.map(i => { i.ItemCollectionKey = awsjsonUnmarshall(i.ItemCollectionKey) }) }) } - return { response } + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UntagResource.html const UntagResource = { + awsDoc: docRoot + 'API_UntagResource.html', validate: { ResourceArn: { ...str, required }, TagKeys: { ...arr, required }, @@ -836,10 +871,11 @@ const UntagResource = { headers: headers('UntagResource'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateContinuousBackups.html const UpdateContinuousBackups = { + awsDoc: docRoot + 'API_UpdateContinuousBackups.html', validate: { TableName, PointInTimeRecoverySpecification: obj, @@ -848,10 +884,11 @@ const UpdateContinuousBackups = { headers: headers('UpdateContinuousBackups'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateContributorInsights.html const UpdateContributorInsights = { + awsDoc: docRoot + 'API_UpdateContributorInsights.html', validate: { TableName, ContributorInsightsAction: str, @@ -861,10 +898,11 @@ const UpdateContributorInsights = { headers: headers('UpdateContributorInsights'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateGlobalTable.html const UpdateGlobalTable = { + awsDoc: docRoot + 'API_UpdateGlobalTable.html', validate: { GlobalTableName: { ...str, required }, ReplicaUpdates: arr, @@ -873,10 +911,11 @@ const UpdateGlobalTable = { headers: headers('UpdateGlobalTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateGlobalTableSettings.html const UpdateGlobalTableSettings = { + awsDoc: docRoot + 'API_UpdateGlobalTableSettings.html', validate: { GlobalTableName: { ...str, required }, GlobalTableBillingMode: str, @@ -889,10 +928,11 @@ const UpdateGlobalTableSettings = { headers: headers('UpdateGlobalTableSettings'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html const UpdateItem = { + awsDoc: docRoot + 'API_UpdateItem.html', validate: { Key, TableName, @@ -913,18 +953,19 @@ const UpdateItem = { headers: headers('UpdateItem'), payload: params, }), - response: async (response, { awsjsonUnmarshall }) => { - if (Object.keys(response?.ItemCollectionMetrics || {})?.length) { - Object.entries(response.ItemCollectionMetrics.ItemCollectionKey).forEach(([ key, props ]) => { - response.ItemCollectionMetrics.ItemCollectionKey[key] = awsjsonUnmarshall(props) + response: async ({ payload }, { awsjsonUnmarshall }) => { + if (Object.keys(payload?.ItemCollectionMetrics || {})?.length) { + Object.entries(payload.ItemCollectionMetrics.ItemCollectionKey).forEach(([ key, props ]) => { + payload.ItemCollectionMetrics.ItemCollectionKey[key] = awsjsonUnmarshall(props) }) } - return { awsjson: awsjsonRes, response } + payload.awsjson = awsjsonRes + return payload }, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html const UpdateTable = { + awsDoc: docRoot + 'API_UpdateTable.html', validate: { TableName, AttributeDefinitions: arr, @@ 
-941,10 +982,11 @@ const UpdateTable = { headers: headers('UpdateTable'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTableReplicaAutoScaling.html const UpdateTableReplicaAutoScaling = { + awsDoc: docRoot + 'API_UpdateTableReplicaAutoScaling.html', validate: { TableName, GlobalSecondaryIndexUpdates: arr, @@ -955,10 +997,11 @@ const UpdateTableReplicaAutoScaling = { headers: headers('UpdateTableReplicaAutoScaling'), payload: params, }), + response: defaultResponse, } -// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTimeToLive.html const UpdateTimeToLive = { + awsDoc: docRoot + 'API_UpdateTimeToLive.html', validate: { TableName, TimeToLiveSpecification: obj, @@ -967,6 +1010,7 @@ const UpdateTimeToLive = { headers: headers('UpdateTimeToLive'), payload: params, }), + response: defaultResponse, } const methods = { BatchExecuteStatement, BatchGetItem, BatchWriteItem, CreateBackup, CreateGlobalTable, CreateTable, DeleteBackup, DeleteItem, DeleteTable, DescribeBackup, DescribeContinuousBackups, DescribeContributorInsights, DescribeEndpoints, DescribeExport, DescribeGlobalTable, DescribeGlobalTableSettings, DescribeImport, DescribeKinesisStreamingDestination, DescribeLimits, DescribeTable, DescribeTableReplicaAutoScaling, DescribeTimeToLive, DisableKinesisStreamingDestination, EnableKinesisStreamingDestination, ExecuteStatement, ExecuteTransaction, ExportTableToPointInTime, GetItem, ImportTable, ListBackups, ListContributorInsights, ListExports, ListGlobalTables, ListImports, ListTables, ListTagsOfResource, PutItem, Query, RestoreTableFromBackup, RestoreTableToPointInTime, Scan, TagResource, TransactGetItems, TransactWriteItems, UntagResource, UpdateContinuousBackups, UpdateContributorInsights, UpdateGlobalTable, UpdateGlobalTableSettings, UpdateItem, UpdateTable, UpdateTableReplicaAutoScaling, UpdateTimeToLive } diff --git a/plugins/s3/package.json b/plugins/s3/package.json new file mode 100644 index 00000000..7a520ef6 --- /dev/null +++ b/plugins/s3/package.json @@ -0,0 +1,21 @@ +{ + "name": "@aws-lite/s3", + "version": "0.0.0", + "description": "Official `aws-lite` plugin for S3", + "homepage": "https://github.com/architect/aws-lite", + "repository": { + "type": "git", + "url": "https://github.com/architect/aws-lite", + "directory": "plugins/s3" + }, + "bugs": "https://github.com/architect/aws-lite/issues", + "main": "src/index.mjs", + "engines": { + "node": ">=16" + }, + "author": "@architect", + "license": "Apache-2.0", + "files": [ + "src" + ] +} \ No newline at end of file diff --git a/plugins/s3/readme.md b/plugins/s3/readme.md new file mode 100644 index 00000000..f4248124 --- /dev/null +++ b/plugins/s3/readme.md @@ -0,0 +1,289 @@ +# `@aws-lite/s3` + +> Official `aws-lite` plugin for S3 + +> Maintained by: [@architect](https://github.com/architect) + + +## Install + +```sh +npm i @aws-lite/s3 +``` + + +## Methods + + + +### `GetObject` + +[Canonical AWS API doc](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html) + +Properties: +- **`Bucket` (string) [required]** + - S3 bucket name +- **`Key` (string) [required]** + - S3 key / file name +- **`PartNumber` (number)** + - Part number (between 1 - 10,000) of the object +- **`VersionId` (string)** + - Reference a specific version of the object +- **`IfMatch` (string)** + - Sets request header: `if-match` +- **`IfModifiedSince` (string)** + - Sets request header: `if-modified-since` +- **`IfNoneMatch` (string)** + - Sets 
request header: `if-none-match` +- **`IfUnmodifiedSince` (string)** + - Sets request header: `if-unmodified-since` +- **`Range` (string)** + - Sets request header: `range` +- **`SSECustomerAlgorithm` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-algorithm` +- **`SSECustomerKey` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key` +- **`SSECustomerKeyMD5` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key-md5` +- **`RequestPayer` (string)** + - Sets request header: `x-amz-request-payer` +- **`ExpectedBucketOwner` (string)** + - Sets request header: `x-amz-expected-bucket-owner` +- **`ChecksumMode` (string)** + - Sets request header: `x-amz-checksum-mode` +- **`ResponseCacheControl` (string)** + - Sets response header: `cache-control` +- **`ResponseContentDisposition` (string)** + - Sets response header: `content-disposition` +- **`ResponseContentEncoding` (string)** + - Sets response header: `content-encoding` +- **`ResponseContentLanguage` (string)** + - Sets response header: `content-language` +- **`ResponseContentType` (string)** + - Sets response header: `content-type` +- **`ResponseExpires` (string)** + - Sets response header: `expires` + + +### `HeadObject` + +[Canonical AWS API doc](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html) + +Properties: +- **`Bucket` (string) [required]** + - S3 bucket name +- **`Key` (string) [required]** + - S3 key / file name +- **`PartNumber` (number)** + - Part number (between 1 - 10,000) of the object +- **`VersionId` (string)** + - Reference a specific version of the object +- **`IfMatch` (string)** + - Sets request header: `if-match` +- **`IfModifiedSince` (string)** + - Sets request header: `if-modified-since` +- **`IfNoneMatch` (string)** + - Sets request header: `if-none-match` +- **`IfUnmodifiedSince` (string)** + - Sets request header: `if-unmodified-since` +- **`Range` (string)** + - Sets request header: `range` +- **`SSECustomerAlgorithm` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-algorithm` +- **`SSECustomerKey` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key` +- **`SSECustomerKeyMD5` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key-md5` +- **`RequestPayer` (string)** + - Sets request header: `x-amz-request-payer` +- **`ExpectedBucketOwner` (string)** + - Sets request header: `x-amz-expected-bucket-owner` +- **`ChecksumMode` (string)** + - Sets request header: `x-amz-checksum-mode` + + +### `PutObject` + +[Canonical AWS API doc](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html) + +Properties: +- **`Bucket` (string) [required]** + - S3 bucket name +- **`Key` (string) [required]** + - S3 key / file name +- **`File` (string) [required]** + - File path to be read and uploaded from the local filesystem +- **`MinChunkSize` (number)** + - Minimum size (in bytes) to utilize AWS-chunk-encoded uploads to S3 +- **`ACL` (string)** + - Sets request header: `x-amz-acl` +- **`BucketKeyEnabled` (string)** + - Sets request header: `x-amz-server-side-encryption-bucket-key-enabled` +- **`CacheControl` (string)** + - Sets request header: `cache-control` +- **`ChecksumAlgorithm` (string)** + - Sets request header: `x-amz-sdk-checksum-algorithm` +- **`ChecksumCRC32` (string)** + - Sets request header: `x-amz-checksum-crc32` +- **`ChecksumCRC32C` (string)** + - Sets request header: `x-amz-checksum-crc32c` +- **`ChecksumSHA1` (string)** + - Sets request 
header: `x-amz-checksum-sha1` +- **`ChecksumSHA256` (string)** + - Sets request header: `x-amz-checksum-sha256` +- **`ContentDisposition` (string)** + - Sets request header: `content-disposition` +- **`ContentEncoding` (string)** + - Sets request header: `content-encoding` +- **`ContentLanguage` (string)** + - Sets request header: `content-language` +- **`ContentLength` (string)** + - Sets request header: `content-length` +- **`ContentMD5` (string)** + - Sets request header: `content-md5` +- **`ContentType` (string)** + - Sets request header: `content-type` +- **`ExpectedBucketOwner` (string)** + - Sets request header: `x-amz-expected-bucket-owner` +- **`Expires` (string)** + - Sets request header: `expires` +- **`GrantFullControl` (string)** + - Sets request header: `x-amz-grant-full-control` +- **`GrantRead` (string)** + - Sets request header: `x-amz-grant-read` +- **`GrantReadACP` (string)** + - Sets request header: `x-amz-grant-read-acp` +- **`GrantWriteACP` (string)** + - Sets request header: `x-amz-grant-write-acp` +- **`ObjectLockLegalHoldStatus` (string)** + - Sets request header: `x-amz-object-lock-legal-hold` +- **`ObjectLockMode` (string)** + - Sets request header: `x-amz-object-lock-mode` +- **`ObjectLockRetainUntilDate` (string)** + - Sets request header: `x-amz-object-lock-retain-until-date` +- **`RequestPayer` (string)** + - Sets request header: `x-amz-request-payer` +- **`ServerSideEncryption` (string)** + - Sets request header: `x-amz-server-side-encryption` +- **`SSECustomerAlgorithm` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-algorithm` +- **`SSECustomerKey` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key` +- **`SSECustomerKeyMD5` (string)** + - Sets request header: `x-amz-server-side-encryption-customer-key-md5` +- **`SSEKMSEncryptionContext` (string)** + - Sets request header: `x-amz-server-side-encryption-context` +- **`SSEKMSKeyId` (string)** + - Sets request header: `x-amz-server-side-encryption-aws-kms-key-id` +- **`StorageClass` (string)** + - Sets request header: `x-amz-storage-class` +- **`Tagging` (string)** + - Sets request header: `x-amz-tagging` +- **`WebsiteRedirectLocation` (string)** + - Sets request header: `x-amz-website-redirect-location` + + +### Methods yet to be implemented + +> Please help out by [opening a PR](https://github.com/architect/aws-lite#authoring-aws-lite-plugins)! 
+ +- `AbortMultipartUpload` +- `CompleteMultipartUpload` +- `CopyObject` +- `CreateBucket` +- `CreateMultipartUpload` +- `DeleteBucket` +- `DeleteBucketAnalyticsConfiguration` +- `DeleteBucketCors` +- `DeleteBucketEncryption` +- `DeleteBucketIntelligentTieringConfiguration` +- `DeleteBucketInventoryConfiguration` +- `DeleteBucketLifecycle` +- `DeleteBucketMetricsConfiguration` +- `DeleteBucketOwnershipControls` +- `DeleteBucketPolicy` +- `DeleteBucketReplication` +- `DeleteBucketTagging` +- `DeleteBucketWebsite` +- `DeleteObject` +- `DeleteObjects` +- `DeleteObjectTagging` +- `DeletePublicAccessBlock` +- `GetBucketAccelerateConfiguration` +- `GetBucketAcl` +- `GetBucketAnalyticsConfiguration` +- `GetBucketCors` +- `GetBucketEncryption` +- `GetBucketIntelligentTieringConfiguration` +- `GetBucketInventoryConfiguration` +- `GetBucketLifecycle` +- `GetBucketLifecycleConfiguration` +- `GetBucketLocation` +- `GetBucketLogging` +- `GetBucketMetricsConfiguration` +- `GetBucketNotification` +- `GetBucketNotificationConfiguration` +- `GetBucketOwnershipControls` +- `GetBucketPolicy` +- `GetBucketPolicyStatus` +- `GetBucketReplication` +- `GetBucketRequestPayment` +- `GetBucketTagging` +- `GetBucketVersioning` +- `GetBucketWebsite` +- `GetObjectAcl` +- `GetObjectAttributes` +- `GetObjectLegalHold` +- `GetObjectLockConfiguration` +- `GetObjectRetention` +- `GetObjectTagging` +- `GetObjectTorrent` +- `GetPublicAccessBlock` +- `HeadBucket` +- `ListBucketAnalyticsConfigurations` +- `ListBucketIntelligentTieringConfigurations` +- `ListBucketInventoryConfigurations` +- `ListBucketMetricsConfigurations` +- `ListBuckets` +- `ListMultipartUploads` +- `ListObjects` +- `ListObjectsV2` +- `ListObjectVersions` +- `ListParts` +- `PutBucketAccelerateConfiguration` +- `PutBucketAcl` +- `PutBucketAnalyticsConfiguration` +- `PutBucketCors` +- `PutBucketEncryption` +- `PutBucketIntelligentTieringConfiguration` +- `PutBucketInventoryConfiguration` +- `PutBucketLifecycle` +- `PutBucketLifecycleConfiguration` +- `PutBucketLogging` +- `PutBucketMetricsConfiguration` +- `PutBucketNotification` +- `PutBucketNotificationConfiguration` +- `PutBucketOwnershipControls` +- `PutBucketPolicy` +- `PutBucketReplication` +- `PutBucketRequestPayment` +- `PutBucketTagging` +- `PutBucketVersioning` +- `PutBucketWebsite` +- `PutObjectAcl` +- `PutObjectLegalHold` +- `PutObjectLockConfiguration` +- `PutObjectRetention` +- `PutObjectTagging` +- `PutPublicAccessBlock` +- `RestoreObject` +- `SelectObjectContent` +- `UploadPart` +- `UploadPartCopy` +- `WriteGetObjectResponse` + + + +## Learn more + +Please see the [main `aws-lite` readme](https://github.com/architect/aws-lite) for more information about `aws-lite` plugins. 
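The three implemented S3 methods documented above can be exercised together; a minimal sketch follows (the bucket, key, and file path are hypothetical, and the installed `@aws-lite/s3` plugin is assumed to be loaded by the client):

```js
import awsLite from '@aws-lite/client'
const aws = await awsLite()

// Upload a local file; files over the minimum chunk size use AWS-chunk-encoded, signed streaming uploads
await aws.s3.PutObject({
  Bucket: 'my-bucket',
  Key: 'notes.json',
  File: './notes.json',
  ContentType: 'application/json',
})

// Returns object metadata parsed from the response headers (e.g. `ETag`, `ContentType`)
const meta = await aws.s3.HeadObject({ Bucket: 'my-bucket', Key: 'notes.json' })

// Returns the object body; JSON responses are automatically parsed
const notes = await aws.s3.GetObject({ Bucket: 'my-bucket', Key: 'notes.json' })
```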
diff --git a/plugins/s3/src/incomplete.mjs b/plugins/s3/src/incomplete.mjs new file mode 100644 index 00000000..9eac35d5 --- /dev/null +++ b/plugins/s3/src/incomplete.mjs @@ -0,0 +1,2 @@ +const x = false +export default { AbortMultipartUpload: x, CompleteMultipartUpload: x, CopyObject: x, CreateBucket: x, CreateMultipartUpload: x, DeleteBucket: x, DeleteBucketAnalyticsConfiguration: x, DeleteBucketCors: x, DeleteBucketEncryption: x, DeleteBucketIntelligentTieringConfiguration: x, DeleteBucketInventoryConfiguration: x, DeleteBucketLifecycle: x, DeleteBucketMetricsConfiguration: x, DeleteBucketOwnershipControls: x, DeleteBucketPolicy: x, DeleteBucketReplication: x, DeleteBucketTagging: x, DeleteBucketWebsite: x, DeleteObject: x, DeleteObjects: x, DeleteObjectTagging: x, DeletePublicAccessBlock: x, GetBucketAccelerateConfiguration: x, GetBucketAcl: x, GetBucketAnalyticsConfiguration: x, GetBucketCors: x, GetBucketEncryption: x, GetBucketIntelligentTieringConfiguration: x, GetBucketInventoryConfiguration: x, GetBucketLifecycle: x, GetBucketLifecycleConfiguration: x, GetBucketLocation: x, GetBucketLogging: x, GetBucketMetricsConfiguration: x, GetBucketNotification: x, GetBucketNotificationConfiguration: x, GetBucketOwnershipControls: x, GetBucketPolicy: x, GetBucketPolicyStatus: x, GetBucketReplication: x, GetBucketRequestPayment: x, GetBucketTagging: x, GetBucketVersioning: x, GetBucketWebsite: x, GetObjectAcl: x, GetObjectAttributes: x, GetObjectLegalHold: x, GetObjectLockConfiguration: x, GetObjectRetention: x, GetObjectTagging: x, GetObjectTorrent: x, GetPublicAccessBlock: x, HeadBucket: x, ListBucketAnalyticsConfigurations: x, ListBucketIntelligentTieringConfigurations: x, ListBucketInventoryConfigurations: x, ListBucketMetricsConfigurations: x, ListBuckets: x, ListMultipartUploads: x, ListObjects: x, ListObjectsV2: x, ListObjectVersions: x, ListParts: x, PutBucketAccelerateConfiguration: x, PutBucketAcl: x, PutBucketAnalyticsConfiguration: x, PutBucketCors: x, PutBucketEncryption: x, PutBucketIntelligentTieringConfiguration: x, PutBucketInventoryConfiguration: x, PutBucketLifecycle: x, PutBucketLifecycleConfiguration: x, PutBucketLogging: x, PutBucketMetricsConfiguration: x, PutBucketNotification: x, PutBucketNotificationConfiguration: x, PutBucketOwnershipControls: x, PutBucketPolicy: x, PutBucketReplication: x, PutBucketRequestPayment: x, PutBucketTagging: x, PutBucketVersioning: x, PutBucketWebsite: x, PutObjectAcl: x, PutObjectLegalHold: x, PutObjectLockConfiguration: x, PutObjectRetention: x, PutObjectTagging: x, PutPublicAccessBlock: x, RestoreObject: x, SelectObjectContent: x, UploadPart: x, UploadPartCopy: x, WriteGetObjectResponse: x } diff --git a/plugins/s3/src/index.mjs b/plugins/s3/src/index.mjs new file mode 100644 index 00000000..04bb7133 --- /dev/null +++ b/plugins/s3/src/index.mjs @@ -0,0 +1,75 @@ +import incomplete from './incomplete.mjs' +import lib from './lib.mjs' +const { getValidateHeaders, getHeadersFromParams, getQueryFromParams, parseHeadersToResults } = lib +import PutObject from './put-object.mjs' + +const service = 's3' +const required = true + +/** + * Plugin maintained by: @architect + */ + +const GetObject = { + awsDoc: 'https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html', + validate: { + Bucket: { type: 'string', required, comment: 'S3 bucket name' }, + Key: { type: 'string', required, comment: 'S3 key / file name' }, + PartNumber: { type: 'number', comment: 'Part number (between 1 - 10,000) of the object' }, + VersionId: { type: 'string', 
comment: 'Reference a specific version of the object' }, + // Here come the headers + ...getValidateHeaders('IfMatch', 'IfModifiedSince', 'IfNoneMatch', 'IfUnmodifiedSince', + 'Range', 'SSECustomerAlgorithm', 'SSECustomerKey', 'SSECustomerKeyMD5', 'RequestPayer', + 'ExpectedBucketOwner', 'ChecksumMode'), + ResponseCacheControl: { type: 'string', comment: 'Sets response header: `cache-control`' }, + ResponseContentDisposition: { type: 'string', comment: 'Sets response header: `content-disposition`' }, + ResponseContentEncoding: { type: 'string', comment: 'Sets response header: `content-encoding`' }, + ResponseContentLanguage: { type: 'string', comment: 'Sets response header: `content-language`' }, + ResponseContentType: { type: 'string', comment: 'Sets response header: `content-type`' }, + ResponseExpires: { type: 'string', comment: 'Sets response header: `expires`' }, + }, + request: async (params) => { + let { Bucket, Key } = params + let queryParams = [ 'PartNumber', 'ResponseCacheControl', 'ResponseContentDisposition', + 'ResponseContentEncoding', 'ResponseContentLanguage', 'ResponseContentType', + 'ResponseExpires', 'VersionId' ] + let headers = getHeadersFromParams(params, queryParams) + let query = getQueryFromParams(params, queryParams) + return { + endpoint: `/${Bucket}/${Key}`, + headers, + query, + } + }, + response: ({ payload }) => payload, +} + +const HeadObject = { + awsDoc: 'https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html', + validate: { + Bucket: { type: 'string', required, comment: 'S3 bucket name' }, + Key: { type: 'string', required, comment: 'S3 key / file name' }, + PartNumber: { type: 'number', comment: 'Part number (between 1 - 10,000) of the object' }, + VersionId: { type: 'string', comment: 'Reference a specific version of the object' }, + // Here come the headers + ...getValidateHeaders('IfMatch', 'IfModifiedSince', 'IfNoneMatch', 'IfUnmodifiedSince', + 'Range', 'SSECustomerAlgorithm', 'SSECustomerKey', 'SSECustomerKeyMD5', 'RequestPayer', + 'ExpectedBucketOwner', 'ChecksumMode'), + }, + request: async (params) => { + let { Bucket, Key } = params + let queryParams = [ 'PartNumber', 'VersionId' ] + let headers = getHeadersFromParams(params, queryParams) + let query = getQueryFromParams(params, queryParams) + return { + endpoint: `/${Bucket}/${Key}`, + method: 'HEAD', + headers, + query, + } + }, + response: parseHeadersToResults, +} + +const methods = { GetObject, HeadObject, PutObject, ...incomplete } +export default { service, methods } diff --git a/plugins/s3/src/lib.mjs b/plugins/s3/src/lib.mjs new file mode 100644 index 00000000..9354e696 --- /dev/null +++ b/plugins/s3/src/lib.mjs @@ -0,0 +1,113 @@ +// Generate validation for commonly used headers +const getValidateHeaders = (...headers) => headers.reduce((acc, h) => { + if (!headerMappings[h]) throw ReferenceError(`Header not found: ${h}`) + acc[h] = { type: 'string', comment: comment(headerMappings[h]) } + return acc +}, {}) +const comment = header => `Sets request header: \`${header}\`` + +// Map AWS-named S3 params to their respective headers +// The !x-amz headers are documented by AWS as old school pascal-case headers; lowcasing them to be HTTP 2.0 compliant +const headerMappings = { + AcceptRanges: 'accept-ranges', + ACL: 'x-amz-acl', + ArchiveStatus: 'x-amz-archive-status', + BucketKeyEnabled: 'x-amz-server-side-encryption-bucket-key-enabled', + CacheControl: 'cache-control', + ChecksumAlgorithm: 'x-amz-sdk-checksum-algorithm', + ChecksumCRC32: 'x-amz-checksum-crc32', + 
ChecksumCRC32C: 'x-amz-checksum-crc32c', + ChecksumMode: 'x-amz-checksum-mode', + ChecksumSHA1: 'x-amz-checksum-sha1', + ChecksumSHA256: 'x-amz-checksum-sha256', + ContentDisposition: 'content-disposition', + ContentEncoding: 'content-encoding', + ContentLanguage: 'content-language', + ContentLength: 'content-length', + ContentMD5: 'content-md5', + ContentType: 'content-type', + DeleteMarker: 'x-amz-delete-marker', + ETag: 'etag', + ExpectedBucketOwner: 'x-amz-expected-bucket-owner', + Expiration: 'x-amz-expiration', + Expires: 'expires', + GrantFullControl: 'x-amz-grant-full-control', + GrantRead: 'x-amz-grant-read', + GrantReadACP: 'x-amz-grant-read-acp', + GrantWriteACP: 'x-amz-grant-write-acp', + IfMatch: 'if-match', + IfModifiedSince: 'if-modified-since', + IfNoneMatch: 'if-none-match', + IfUnmodifiedSince: 'if-unmodified-since', + LastModified: 'Last-Modified', + MissingMeta: 'x-amz-missing-meta', + ObjectLockLegalHoldStatus: 'x-amz-object-lock-legal-hold', + ObjectLockMode: 'x-amz-object-lock-mode', + ObjectLockRetainUntilDate: 'x-amz-object-lock-retain-until-date', + PartsCount: 'x-amz-mp-parts-count', + Range: 'range', + ReplicationStatus: 'x-amz-replication-status', + RequestCharged: 'x-amz-request-charged', + RequestPayer: 'x-amz-request-payer', + Restore: 'x-amz-restore', + ServerSideEncryption: 'x-amz-server-side-encryption', + SSECustomerAlgorithm: 'x-amz-server-side-encryption-customer-algorithm', + SSECustomerKey: 'x-amz-server-side-encryption-customer-key', + SSECustomerKeyMD5: 'x-amz-server-side-encryption-customer-key-md5', + SSEKMSEncryptionContext: 'x-amz-server-side-encryption-context', + SSEKMSKeyId: 'x-amz-server-side-encryption-aws-kms-key-id', + StorageClass: 'x-amz-storage-class', + Tagging: 'x-amz-tagging', + VersionId: 'x-amz-version-id', + WebsiteRedirectLocation: 'x-amz-website-redirect-location', +} +// Invert headerMappings for header-based lookups +const paramMappings = Object.fromEntries(Object.entries(headerMappings).map(([ k, v ]) => [ v, k ])) + +// Take a response, and parse its headers into the AWS-named params of headerMappings +const quoted = /^".*"$/ +const ignoreHeaders = [ 'content-length' ] +const parseHeadersToResults = ({ headers }) => { + let results = Object.entries(headers).reduce((acc, [ header, value ]) => { + const normalized = header.toLowerCase() + if (value === 'true') value = true + if (value === 'false') value = false + if (value.match(quoted)) { + value = value.substring(1, value.length - 1) + } + if (paramMappings[normalized] && !ignoreHeaders.includes(normalized)) { + acc[paramMappings[normalized]] = value + } + return acc + }, {}) + return results +} + +function getHeadersFromParams (params, ignore = []) { + let headers = Object.keys(params).reduce((acc, param) => { + if (headerMappings[param] && !ignore.includes(param)) { + acc[headerMappings[param]] = params[param] + } + return acc + }, {}) + return headers +} + +function getQueryFromParams (params, queryParams) { + let query + queryParams.forEach(p => { + if (params[p]) { + if (!query) query = {} + query[p] = params[p] + } + }) + return query +} + +export default { + getValidateHeaders, + getHeadersFromParams, + getQueryFromParams, + headerMappings, + parseHeadersToResults, +} diff --git a/plugins/s3/src/put-object.mjs b/plugins/s3/src/put-object.mjs new file mode 100644 index 00000000..1821a168 --- /dev/null +++ b/plugins/s3/src/put-object.mjs @@ -0,0 +1,172 @@ +import aws4 from 'aws4' +import crypto from 'node:crypto' +import { readFile, stat } from 'node:fs/promises' 
+import { Readable } from 'node:stream' +import lib from './lib.mjs' +const { getHeadersFromParams, getValidateHeaders, parseHeadersToResults } = lib + +const required = true +const chunkBreak = `\r\n` +const minSize = 1024 * 1024 * 5 +const intToHexString = int => String(Number(int).toString(16)) +const algo = 'sha256', utf8 = 'utf8', hex = 'hex' +const hash = str => crypto.createHash(algo).update(str, utf8).digest(hex) +const hmac = (key, str, enc) => crypto.createHmac(algo, key).update(str, utf8).digest(enc) + +function payloadMetadata (chunkSize, signature) { + // Don't forget: after the signature + break would normally follow the body + one more break + return intToHexString(chunkSize) + `;chunk-signature=${signature}` + chunkBreak +} + +const PutObject = { + awsDoc: 'https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html', + // See also: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html + validate: { + Bucket: { type: 'string', required, comment: 'S3 bucket name' }, + Key: { type: 'string', required, comment: 'S3 key / file name' }, + File: { type: 'string', required, comment: 'File path to be read and uploaded from the local filesystem' }, + MinChunkSize: { type: 'number', default: minSize, comment: 'Minimum size (in bytes) to utilize AWS-chunk-encoded uploads to S3' }, + // Here come the headers + ...getValidateHeaders('ACL', 'BucketKeyEnabled', 'CacheControl', 'ChecksumAlgorithm', 'ChecksumCRC32', + 'ChecksumCRC32C', 'ChecksumSHA1', 'ChecksumSHA256', 'ContentDisposition', 'ContentEncoding', + 'ContentLanguage', 'ContentLength', 'ContentMD5', 'ContentType', 'ExpectedBucketOwner', 'Expires', + 'GrantFullControl', 'GrantRead', 'GrantReadACP', 'GrantWriteACP', 'ObjectLockLegalHoldStatus', + 'ObjectLockMode', 'ObjectLockRetainUntilDate', 'RequestPayer', 'ServerSideEncryption', + 'SSECustomerAlgorithm', 'SSECustomerKey', 'SSECustomerKeyMD5', 'SSEKMSEncryptionContext', + 'SSEKMSKeyId', 'StorageClass', 'Tagging', 'WebsiteRedirectLocation') + }, + request: async (params, utils) => { + let { Bucket, Key, File, MinChunkSize } = params + let { credentials, region } = utils + MinChunkSize = MinChunkSize || minSize + + let headers = getHeadersFromParams(params) + + let dataSize + try { + let stats = await stat(File) + dataSize = stats.size + } + catch (err) { + console.log(`Error reading file: ${File}`) + throw err + } + + if (dataSize <= MinChunkSize) { + let payload = await readFile(File) + return { + path: `/${Bucket}/${Key}`, + method: 'PUT', + headers, + payload, + } + } + else { + // We'll assemble file indices of chunks here + let chunks = [ + // Reminder: no payload is sent with the canonical request + { canonicalRequest: true }, + ] + + // We'll need to compute all chunk sizes (including metadata) so that we can get the total content-length for the canonical request + let totalRequestSize = dataSize + let dummySig = 'a'.repeat(64) + let emptyHash = hash('') + + // Multipart uploading requires an extra zero-data chunk to denote completion + let chunkAmount = Math.ceil(dataSize / MinChunkSize) + 1 + + for (let i = 0; i < chunkAmount; i++) { + // Get start end byte position for streaming + let start = i === 0 ? 
0 : i * MinChunkSize + let end = (i * MinChunkSize) + MinChunkSize + + let chunk = {}, chunkSize + // The last real chunk + if (end > dataSize) { + end = dataSize + } + // The 0-byte trailing chunk + if (start > dataSize) { + chunkSize = 0 + chunk.finalRequest = true + } + // Normal + else { + chunkSize = end - start + chunk.start = start + chunk.end = end + } + + totalRequestSize += payloadMetadata(chunkSize, dummySig).length + chunkBreak.length + chunks.push({ ...chunk, chunkSize }) + } + + headers = { + ...headers, + 'content-encoding': 'aws-chunked', + 'content-length': totalRequestSize, + 'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD', + 'x-amz-decoded-content-length': dataSize, + } + let canonicalReq = aws4.sign({ + service: 's3', + region, + method: 'PUT', + path: `/${Bucket}/${Key}`, + headers, + }, credentials) + let seedSignature = canonicalReq.headers.Authorization.split('Signature=')[1] + chunks[0].signature = seedSignature + + let date = canonicalReq.headers['X-Amz-Date'] || + canonicalReq.headers['x-amz-date'] + let yyyymmdd = date.split('T')[0] + let payloadSigHeader = `AWS4-HMAC-SHA256-PAYLOAD\n` + + `${date}\n` + + `${yyyymmdd}/${canonicalReq.region}/s3/aws4_request\n` + + // TODO make this streamable + let data = await readFile(File) + let stream = new Readable() + chunks.forEach((chunk, i) => { + if (chunk.canonicalRequest) return + + // Ideally we'd use start/end with fs.createReadStream + let { start, end } = chunk + let body = chunk.finalRequest ? '' : data.slice(start, end) + let chunkHash = chunk.finalRequest ? emptyHash : hash(body) + + let payloadSigValues = [ + chunks[i - 1].signature, // Previous chunk signature + emptyHash, // Hash of an empty line ¯\_(ツ)_/¯ + chunkHash, // Hash of the current chunk + ].join('\n') + let signing = payloadSigHeader + payloadSigValues + + // lol at this cascade of hmacs + let kDate = hmac('AWS4' + credentials.secretAccessKey, yyyymmdd) + let kRegion = hmac(kDate, region) + let kService = hmac(kRegion, 's3') + let kCredentials = hmac(kService, 'aws4_request') + let chunkSignature = hmac(kCredentials, signing, hex) + + // Important: populate the signature for the next chunk down the line + chunks[i].signature = chunkSignature + + // Now add the chunk to the stream + let part = payloadMetadata(chunk.chunkSize, chunkSignature) + body + chunkBreak + stream.push(part) + + if (chunk.finalRequest) { + stream.push(null) + } + }) + canonicalReq.payload = stream + return canonicalReq + } + }, + response: parseHeadersToResults, +} + +export default PutObject diff --git a/readme.md b/readme.md index ff89fc98..64d75877 100644 --- a/readme.md +++ b/readme.md @@ -11,12 +11,14 @@ - [Usage](#usage) - [Configuration](#configuration) - [Client requests](#client-requests) + - [Client responses](#client-responses) - [Plugins](#plugins) - [Plugin API](#plugin-api) - [`validate`](#validate) - [`request()`](#request) - [`response()`](#response) - [`error()`](#error) + - [Plugin utils](#plugin-utils) - [List of official `@aws-lite/*` plugins](#list-of-official-aws-lite-plugins) - [Contributing](#contributing) - [Setup](#setup) @@ -171,9 +173,13 @@ The following parameters may be passed with individual client requests; only `se - **`headers` (object)** - Header names + values to be added to your request - By default, all headers are included in [authentication via AWS signature v4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) -- **`payload` (object or string)** + - If your request includes a `payload` 
that cannot be automatically JSON-encoded and you do not specify a `content-type` header, the default `application/octet-stream` will be used +- **`payload` (object, buffer, readable stream, string)** - Aliases: `body`, `data`, `json` - - As a convenience, any passed objects are automatically JSON-encoded (with the appropriate `content-type` header set, if not already present); strings pass through + - Payload to be used as the HTTP request body; as a convenience, any passed objects are automatically JSON-encoded (with the appropriate `content-type` header set, if not already present); buffers, streams, and strings simply pass through as-is + - Readable streams are currently experimental + - Passing a Node.js readable stream initiates an HTTP data stream to the API endpoint instead of writing a normal HTTP body + - Streams are not automatically signed like normal HTTP bodies, and may [require their own signing procedures, as in S3](https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html) - **`query` (object)** - Serialize the passed object and append it to your `endpoint` as a query string in your request - **`service` (string) [required]** @@ -209,11 +215,53 @@ await awsLite({ ``` +### Client responses + +The following properties are returned with each non-error client response: + +- **`statusCode` (number)** + - HTTP status code of the response +- **`headers` (object)** + - Response header names + values +- **`payload` (object, string, null)** + - Response payload; as a convenience, JSON-encoded responses are automatically parsed; XML-encoded responses are returned as plain strings + - Responses without an HTTP body return a `null` payload + +An example: + +```js +import awsLite from '@aws-lite/client' +const aws = await awsLite() + +await awsLite({ + service: 'lambda', + endpoint: '/2015-03-31/functions/$function-name/configuration', +}) +// { +// statusCode: 200, +// headers: { +// 'content-type': 'application/json', +// 'x-amzn-requestid': 'ba3a55d2-16c2-4c2b-afe1-cf0c5523040b', +// ... +// }, +// payload: { +// FunctionName: '$function-name', +// FunctionArn: 'arn:aws:lambda:us-west-1:1234567890:function:$function-name', +// Role: 'arn:aws:iam::1234567890:role/$function-name-role', +// Runtime: 'nodejs18.x', +// ... +// } +// } +``` + + ## Plugins Out of the box, [`@aws-lite/client`](https://www.npmjs.com/package/@aws-lite/client) is a full-featured AWS API client that you can use to interact with any AWS service that makes use of [authentication via AWS signature v4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) (which should be just about all of them). -`@aws-lite/client` can be extended with plugins to more easily interact with AWS services. A bit more about how plugins work: +`@aws-lite/client` can be extended with plugins to more easily interact with AWS services, or provide custom behavior or semantics. As such, plugins enable you to have significantly more control over the entire API request/response lifecycle. + +A bit more about how plugins work: - Plugins can be authored in ESM or CJS - Plugins can be dependencies downloaded from npm, or also live locally in your codebase @@ -238,7 +286,7 @@ aws.dynamodb.PutItem({ TableName: 'my-table', Key: { id: 'hello' } }) The `aws-lite` plugin API is lightweight and simple to learn. 
It makes use of four optional lifecycle hooks: -- [`validate`](#validate) [optional] - an object of property names and types to validate inputs with pre-request +- [`validate`](#validate) [optional] - an object of property names and types used to validate inputs pre-request - [`request()`](#request) [optional] - an async function that enables mutation of inputs to the final service API request - [`response()`](#response) [optional] - an async function that enables mutation of service API responses before they are returned - [`error()`](#error) [optional] - an async function that enables mutation of service API errors before they are returned @@ -246,9 +294,11 @@ The `aws-lite` plugin API is lightweight and simple to learn. It makes use of fo The above four lifecycle hooks must be exported as an object named `methods`, along with a valid AWS service code property named `service`, like so: ```js -// A simple plugin for validating input +// A simple plugin for validating `TableName` input on dynamodb.PutItem() calls export default { service: 'dynamodb', + awsDoc: 'https://docs.aws.../API_PutItem.html', + readme: 'https://github...#PutItem', methods: { PutItem: { validate: { @@ -261,6 +311,10 @@ export default { aws.dynamodb.PutItem({ TableName: 12345 }) // Throws validation error ``` +Additionally, two optional (but highly recommended) metadata properties that will be included in any method errors: +- `awsDoc` (string) [optional] - intended to be a link to the AWS API doc pertaining to this method; should usually start with `https://docs.aws.amazon.com/...` +- `readme` (string) [optional] - a link to a relevant section in your plugin's readme or docs + Example plugins can be found below, in [`plugins/` dir (containing `@aws-lite/*` plugins)](https://github.com/architect/aws-lite/tree/main/plugins), and in [tests](https://github.com/architect/aws-lite/tree/main/test/mock/plugins). @@ -305,7 +359,7 @@ The `request()` lifecycle hook is an optional async function that enables that e - **`params` (object)** - The method's input parameters - **`utils` (object)** - - Helper utilities for (de)serializing AWS-flavored JSON: `awsjsonMarshall`, `awsjsonUnmarshall` + - [Plugin helper utilities](#plugin-utils) The `request()` method may return nothing, or a [valid client request](#client-requests). An example: @@ -335,12 +389,22 @@ The `response()` lifecycle hook is an async function that enables mutation of se `response()` is executed with two positional arguments: -- **`response` (any)** - - Raw non-error response from AWS service API request; if the entire payload is JSON or AWS-flavored JSON, `aws-lite` will attempt to parse it prior to executing `response()`. Responses that are primarily JSON, but with nested AWS-flavored JSON, will be parsed only as JSON and may require additional deserialization with the `awsjsonUnmarshall` utility +- **`response` (object)** + - An object containing three properties from the API response: + - **`statusCode` (number)** + - HTTP response status code + - **`headers` (object)** + - HTTP response headers + - **`payload` (object or string)** + - Raw non-error response from AWS service API request; if the entire payload is JSON or AWS-flavored JSON, `aws-lite` will attempt to parse it prior to executing `response()`. 
Responses that are primarily JSON, but with nested AWS-flavored JSON, will be parsed only as JSON and may require additional deserialization with the `awsjsonUnmarshall` utility or `awsjson` property - **`utils` (object)** - - Helper utilities for (de)serializing AWS-flavored JSON: `awsjsonMarshall`, `awsjsonUnmarshall` + - [Plugin helper utilities](#plugin-utils) + +The `response()` method may return: nothing (which will pass through the `response` object as-is) or any data (most commonly an object or string, or mutated version of the `response` object). -The `response()` method may return nothing, but if it does return a mutated response, it must come in the form of an object containing a `response` property, and an optional `awsjson` property (that behaves the same as in [client requests](#client-requests)). An example: +Should you return an object, you may also include an `awsjson` property (that behaves the same as in [client requests](#client-requests)). The `awsjson` property is considered reserved, and will be stripped from any returned data. + +An example: ```js // Automatically deserialize AWS-flavored JSON @@ -348,9 +412,10 @@ export default { service: 'dynamodb', methods: { GetItem: { - // Successful responses always have an AWS-flavored JSON `Item` property + // Assume successful responses always have an AWS-flavored JSON `Item` property response: async (response, utils) => { - return { awsjson: [ 'Item' ], response } + response.awsjson = [ 'Item' ] + return response // Returns the response (`statusCode`, `headers`, `payload`), with `payload.Item` unformatted from AWS-flavored JSON, and the `awsjson` property removed } } } @@ -370,9 +435,9 @@ The `error()` lifecycle hook is an async function that enables mutation of servi - **`metadata` (object)** - `aws-lite` error metadata; to improve the quality of the errors presented by `aws-lite`, please only append to this object - **`statusCode` (number or undefined)** - resulting status code of the API response; if an HTTP connection error occurred, no `statusCode` will be present - **`utils` (object)** - - Helper utilities for (de)serializing AWS-flavored JSON: `awsjsonMarshall`, `awsjsonUnmarshall` + - [Plugin helper utilities](#plugin-utils) -The `error()` method may return nothing, a new or mutated version of the error payload it was passed, a string, an object, or a JS error. An example +The `error()` method may return nothing, a new or mutated version of the error payload it was passed, a string, an object, or a JS error. 
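For orientation, here is a minimal sketch of an `error()` hook first; the service and method names are illustrative only, and the shape of the first argument (`statusCode`, `metadata`, plus the raw error payload) follows the list above:

```js
// Sketch only: annotate error metadata and pass the error along (illustrative plugin + method names)
export default {
  service: 'lambda',
  methods: {
    GetFunctionConfiguration: {
      error: async (error) => {
        // Per the guidance above, only append to `metadata`
        if (error.statusCode === 404) {
          error.metadata.notes = 'Function not found; check the function name and region'
        }
        return error // or return nothing to pass the error through unchanged
      },
    },
  },
}
```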
An example: ```js // Improve clarity of error output @@ -394,11 +459,47 @@ export default { ``` +#### Plugin utils + +[`request()`](#request), [`response()`](#response), and [`error()`](#error) are all passed a second argument of helper utilities and data pertaining to the client: + +- **`awsjsonMarshall` (function)** + - Utility for marshalling data to the format underlying AWS-flavored JSON serialization; accepts a plain object, returns a marshalled object +- **`awsjsonUnmarshall` (function)** + - Utility for unmarshalling data from the format underlying AWS-flavored JSON serialization; accepts a marshalled object, returns a plain object +- **`config` (object)** + - The current [client configuration](#configuration); any configured credentials are found in the `credentials` object +- **`credentials` (object)** + - `accessKeyId`, `secretAccessKey`, and `sessionToken` being used in this request + - Note: `secretAccessKey` and `sessionToken` are present in this object, but non-enumerable +- **`region` (string)** + - Canonical service region being used in this request; this value may differ from the region set in the `config` object if overridden per-request + +An example of plugin utils: + +```js +async function request (params, utils) { + let awsStyle = utils.awsjsonMarshall({ ok: true, hi: 'there' }) + console.log(marshalled) // { ok: { BOOL: true }, hi: { S: 'there' } } + + let plain = utils.awsjsonUnmarshall({ ok: { BOOL: true }, hi: { S: 'there' } }) + console.log(unmarshalled) // { ok: true, hi: 'there' } + + console.log(config) // { profile: 'my-profile', autoloadPlugins: true, ... } + + console.log(credentials) // { accessKeyId: 'abc123...' } secrets are non-enumerable + + console.log(region) // 'us-west-1' +} +``` + + ### List of official `@aws-lite/*` plugins - [DynamoDB](https://www.npmjs.com/package/@aws-lite/dynamodb) +- [S3](https://www.npmjs.com/package/@aws-lite/s3) diff --git a/scripts/generate-plugins/_plugin-tmpl.mjs b/scripts/generate-plugins/_plugin-tmpl.mjs index adcbd80d..98cd760c 100644 --- a/scripts/generate-plugins/_plugin-tmpl.mjs +++ b/scripts/generate-plugins/_plugin-tmpl.mjs @@ -7,9 +7,9 @@ const required = true export default { service, methods: { - // TODO: include a reference link with each method, example: - // https://docs.aws.amazon.com/lambda/latest/dg/API_GetFunctionConfiguration.html $ReplaceMe: { + // Include a reference link with each method, for example: + awsDoc: 'https://docs.aws.amazon.com/lambda/latest/dg/API_GetFunctionConfiguration.html', validate: { name: { type: 'string', required }, }, @@ -33,6 +33,7 @@ export default { // TODO: add API link $ReplaceMeToo: { + awsDoc: 'https://docs.aws.amazon.com/...', validate: { name: { type: 'string', required }, }, diff --git a/scripts/generate-plugins/_readme-tmpl.md b/scripts/generate-plugins/_readme-tmpl.md index 12149b5f..3fe4d5ea 100644 --- a/scripts/generate-plugins/_readme-tmpl.md +++ b/scripts/generate-plugins/_readme-tmpl.md @@ -12,6 +12,13 @@ npm i $NAME ``` +## Docs + + + + + + ## Learn more Please see the [main `aws-lite` readme](https://github.com/architect/aws-lite) for more information about `aws-lite` plugins. diff --git a/scripts/generate-plugins/index.js b/scripts/generate-plugins/index.js deleted file mode 100644 index 676d358e..00000000 --- a/scripts/generate-plugins/index.js +++ /dev/null @@ -1,73 +0,0 @@ -#! 
/usr/bin/env node -let { join } = require('path') -let { existsSync, mkdirSync, readFileSync, writeFileSync } = require('fs') -const cwd = process.cwd() - -// Break this into a separate file if it becomes too big / unwieldy! -// - name: the official service name; example: `cloudformation` -// - service: the commonly recognized, more formal version (including casing); example: `CloudFormation` -// - maintainers: array of GitHub handles of the individual(s) or org(s) responsible for maintaining the plugin -const plugins = [ - { name: 'dynamodb', service: 'DynamoDB', maintainers: [ '@architect' ] }, -].sort() -const pluginTmpl = readFileSync(join(__dirname, '_plugin-tmpl.mjs')).toString() -const readmeTmpl = readFileSync(join(__dirname, '_readme-tmpl.md')).toString() -const packageTmpl = readFileSync(join(__dirname, '_package-tmpl.json')) - -plugins.forEach(plugin => { - if (!plugin.name || typeof plugin.name !== 'string' || - !plugin.service || typeof plugin.service !== 'string' || - !plugin.maintainers || !Array.isArray(plugin.maintainers)) { - throw ReferenceError(`Specified plugin must have 'name' (string), 'service' (string), and 'maintainers' (array)`) - } - - let pluginDir = join(cwd, 'plugins', plugin.name) - let maintainers = plugin.maintainers.join(', ') - if (!existsSync(pluginDir)) { - let pluginSrc = join(pluginDir, 'src') - mkdirSync(pluginSrc, { recursive: true }) - - let name = `@aws-lite/${plugin.name}` - let desc = `Official \`aws-lite\` plugin for ${plugin.service}` - - // Plugin: src/index.js - let src = pluginTmpl - .replace(/\$NAME/g, plugin.name) - .replace(/\$MAINTAINERS/g, maintainers) - writeFileSync(join(pluginSrc, 'index.mjs'), src) - - // Plugin: package.json - let pkg = JSON.parse(packageTmpl) - pkg.name = name - pkg.description = desc - pkg.author = maintainers - pkg.repository.directory = `plugins/${plugin.name}` - writeFileSync(join(pluginDir, 'package.json'), JSON.stringify(pkg, null, 2)) - - // Plugin: readme.md - let maintainerLinks = plugin.maintainers.map(p => `[${p}](https://github.com/${p.replace('@', '')})`).join(', ') - let readme = readmeTmpl - .replace(/\$NAME/g, name) - .replace(/\$DESC/g, desc) - .replace(/\$MAINTAINERS/g, maintainerLinks) - writeFileSync(join(pluginDir, 'readme.md'), readme) - - // Project: package.json - let projectPkgFile = join(cwd, 'package.json') - let projectPkg = JSON.parse(readFileSync(projectPkgFile)) - let workspace = `plugins/${plugin.name}` - if (!projectPkg.workspaces.includes(workspace)) { - projectPkg.workspaces.push(workspace) - projectPkg.workspaces = projectPkg.workspaces.sort() - writeFileSync(projectPkgFile, JSON.stringify(projectPkg, null, 2)) - } - } -}) - -// Project readme.md -let projectReadmeFile = join(cwd, 'readme.md') -let projectReadme = readFileSync(projectReadmeFile).toString() -let pluginListRegex = /(?<=(\n))[\s\S]*?(?=())/g -let pluginList = plugins.map(({ name, service }) => `- [${service}](https://www.npmjs.com/package/@aws-lite/${name})`) -projectReadme = projectReadme.replace(pluginListRegex, pluginList.join('\n') + '\n') -writeFileSync(projectReadmeFile, projectReadme) diff --git a/scripts/generate-plugins/index.mjs b/scripts/generate-plugins/index.mjs new file mode 100644 index 00000000..91cb0c23 --- /dev/null +++ b/scripts/generate-plugins/index.mjs @@ -0,0 +1,118 @@ +#! 
/usr/bin/env node +import { join } from 'node:path' +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs' + +const cwd = process.cwd() +const pluginListRegex = /(?<=(\n))[\s\S]*?(?=())/g +const pluginMethodsRegex = /(?<=(\n))[\s\S]*?(?=())/g + +// Break this into a separate file if it becomes too big / unwieldy! +// - name: the official service name; example: `cloudformation` +// - service: the commonly recognized, more formal version (including casing); example: `CloudFormation` +// - maintainers: array of GitHub handles of the individual(s) or org(s) responsible for maintaining the plugin +const plugins = [ + { name: 'dynamodb', service: 'DynamoDB', maintainers: [ '@architect' ] }, + { name: 's3', service: 'S3', maintainers: [ '@architect' ] }, +].sort() + +const pluginTmpl = readFileSync(join(cwd, 'scripts', 'generate-plugins', '_plugin-tmpl.mjs')).toString() +const readmeTmpl = readFileSync(join(cwd, 'scripts', 'generate-plugins', '_readme-tmpl.md')).toString() +const packageTmpl = readFileSync(join(cwd, 'scripts', 'generate-plugins', '_package-tmpl.json')) + +async function main () { + for (let plugin of plugins) { + if (!plugin.name || typeof plugin.name !== 'string' || + !plugin.service || typeof plugin.service !== 'string' || + !plugin.maintainers || !Array.isArray(plugin.maintainers)) { + throw ReferenceError(`Specified plugin must have 'name' (string), 'service' (string), and 'maintainers' (array)`) + } + + let name = `@aws-lite/${plugin.name}` + let pluginDir = join(cwd, 'plugins', plugin.name) + let maintainers = plugin.maintainers.join(', ') + if (!existsSync(pluginDir)) { + let pluginSrc = join(pluginDir, 'src') + mkdirSync(pluginSrc, { recursive: true }) + + let desc = `Official \`aws-lite\` plugin for ${plugin.service}` + + // Plugin: src/index.js + let src = pluginTmpl + .replace(/\$NAME/g, plugin.name) + .replace(/\$MAINTAINERS/g, maintainers) + writeFileSync(join(pluginSrc, 'index.mjs'), src) + + // Plugin: package.json + let pkg = JSON.parse(packageTmpl) + pkg.name = name + pkg.description = desc + pkg.author = maintainers + pkg.repository.directory = `plugins/${plugin.name}` + writeFileSync(join(pluginDir, 'package.json'), JSON.stringify(pkg, null, 2)) + + // Plugin: readme.md + let maintainerLinks = plugin.maintainers.map(p => `[${p}](https://github.com/${p.replace('@', '')})`).join(', ') + let readme = readmeTmpl + .replace(/\$NAME/g, name) + .replace(/\$DESC/g, desc) + .replace(/\$MAINTAINERS/g, maintainerLinks) + writeFileSync(join(pluginDir, 'readme.md'), readme) + + // Project: package.json + let projectPkgFile = join(cwd, 'package.json') + let projectPkg = JSON.parse(readFileSync(projectPkgFile)) + let workspace = `plugins/${plugin.name}` + if (!projectPkg.workspaces.includes(workspace)) { + projectPkg.workspaces.push(workspace) + projectPkg.workspaces = projectPkg.workspaces.sort() + writeFileSync(projectPkgFile, JSON.stringify(projectPkg, null, 2)) + } + } + // Maybe update docs + else { + // TODO ↓ remove once things are nice and dialed in! 
↓ + if (plugin.name !== 's3') continue + + const pluginReadmeFile = join(pluginDir, 'readme.md') + let pluginReadme = readFileSync(pluginReadmeFile).toString() + // Generate docs markdown + const { default: _plugin } = await import(name) + let incompleteMethods = [] + let methodDocs = Object.keys(_plugin.methods).map(methodName => { + let header = `### \`${methodName}\`\n\n` + if (!_plugin.methods[methodName] || _plugin.methods[methodName].disabled) { + incompleteMethods.push(methodName) + return + } + const { awsDoc, validate } = _plugin.methods[methodName] + if (!awsDoc) throw ReferenceError(`All methods must refer to an AWS service API doc: ${name} ${methodName}`) + header += `[Canonical AWS API doc](${awsDoc})\n` + if (validate) { + header += `\nProperties:\n` + Object.entries(validate).map(([ param, values ]) => { + const { type, required, comment } = values + const _req = required ? ' [required]' : '' + const _com = comment ? `\n - ${comment}` : '' + return `- **\`${param}\` (${type})${_req}**${_com}` + }).join('\n') + } + return header + }).filter(Boolean).join('\n\n\n') + '\n' + + if (incompleteMethods.length) { + methodDocs += `\n\n### Methods yet to be implemented\n\n` + + `> Please help out by [opening a PR](https://github.com/architect/aws-lite#authoring-aws-lite-plugins)!\n\n` + + incompleteMethods.map(methodName => `- \`${methodName}\``).join('\n') + '\n' + } + pluginReadme = pluginReadme.replace(pluginMethodsRegex, methodDocs) + writeFileSync(pluginReadmeFile, pluginReadme) + } + } + + // Project readme.md + const projectReadmeFile = join(cwd, 'readme.md') + let projectReadme = readFileSync(projectReadmeFile).toString() + const pluginList = plugins.map(({ name, service }) => `- [${service}](https://www.npmjs.com/package/@aws-lite/${name})`) + projectReadme = projectReadme.replace(pluginListRegex, pluginList.join('\n') + '\n') + writeFileSync(projectReadmeFile, projectReadme) +} +main() diff --git a/src/client-factory.js b/src/client-factory.js index 5890fd26..89a98e85 100644 --- a/src/client-factory.js +++ b/src/client-factory.js @@ -1,4 +1,4 @@ -let { readdirSync } = require('fs') +let { readdir } = require('fs/promises') let { join } = require('path') let { services } = require('./services') let request = require('./request') @@ -7,6 +7,9 @@ let { awsjson } = require('./lib') let { marshall, unmarshall } = require('./_vendor') let errorHandler = require('./error') +let credentialProps = [ 'accessKeyId', 'secretAccessKey', 'sessionToken' ] +let copy = obj => JSON.parse(JSON.stringify(obj)) + // Never autoload these `@aws-lite/*` packages: let ignored = [ 'client', 'arc' ] @@ -26,14 +29,13 @@ module.exports = async function clientFactory (config, creds, region) { // Service API plugins let { autoloadPlugins = true, plugins = [] } = config - /* istanbul ignore next */ // TODO check once plugins are published if (autoloadPlugins) { let nodeModulesDir = join(process.cwd(), 'node_modules') - let mods = readdirSync(nodeModulesDir) + let mods = await readdir(nodeModulesDir) // Find first-party plugins if (mods.includes('@aws-lite')) { - let knownPlugins = readdirSync(join(nodeModulesDir, '@aws-lite')) - let filtered = knownPlugins.filter(p => !ignored.includes(p)).map(p => `@aws-lite/${p}`) + let knownPlugins = await readdir(join(nodeModulesDir, '@aws-lite')) + let filtered = knownPlugins.filter(p => !ignored.includes(p) && !p.endsWith('-types')).map(p => `@aws-lite/${p}`) plugins.push(...filtered) } // Find correctly namespaced 3rd-party plugins @@ -73,8 +75,6 @@ module.exports = 
async function clientFactory (config, creds, region) { if (method.request && typeof method.request !== 'function') { throw ReferenceError(`All plugin request methods must be a function: ${service}`) } - // Error + Response handlers are optional - /* istanbul ignore next */ // TODO remove as soon as plugin.response() API settles if (method.response && typeof method.response !== 'function') { throw ReferenceError(`All plugin response methods must be a function: ${service}`) } @@ -82,48 +82,85 @@ module.exports = async function clientFactory (config, creds, region) { throw ReferenceError(`All plugin error methods must be a function: ${service}`) } }) - let pluginUtils = { awsjsonMarshall: marshall, awsjsonUnmarshall: unmarshall } + + let configuration = copy(config) + credentialProps.forEach(p => delete configuration[p]) + let credentials = copy(creds) + Object.defineProperty(credentials, 'secretAccessKey', { enumerable: false }) + Object.defineProperty(credentials, 'sessionToken', { enumerable: false }) + let pluginUtils = { + awsjsonMarshall: marshall, + awsjsonUnmarshall: unmarshall, + config: configuration, + credentials, + } let clientMethods = {} Object.entries(methods).forEach(([ name, method ]) => { + // Allow for falsy methods to be denoted as incomplete in generated docs + if (!method || method.disabled) return + // For convenient error reporting (and jic anyone wants to enumerate everything) try to ensure the AWS API method names pass through clientMethods[name] = Object.defineProperty(async input => { + input = input || {} let selectedRegion = input?.region || region let metadata = { service, name } + if (method.awsDoc) { + metadata.awsDoc = method.awsDoc + } + // Printed after the AWS doc + if (pluginName.startsWith('@aws-lite/')) { + metadata.readme = `https://github.com/architect/aws-lite/blob/main/plugins/${service}/readme.md#${name}` + } + else if (method.readme) { + metadata.readme = method.readme + } - // Run plugin.request() - try { - var result = await method.request(input, pluginUtils) - result = result || {} + // Initial validation + if (method.validate) { + validateInput(method.validate, input, metadata) } - catch (methodError) { - errorHandler({ error: methodError, metadata }) + + // Run plugin.request() + if (method.request) { + try { + var req = await method.request(input, { ...pluginUtils, region: selectedRegion }) + req = req || {} + } + catch (methodError) { + errorHandler({ error: methodError, metadata }) + } } - // Hit plugin.validate - let params = { ...input, ...result } + // Validate combined inputs of user + plugin + let params = { ...input, ...req } if (method.validate) { validateInput(method.validate, params, metadata) } // Make the request try { - let response = await request({ ...params, ...result, service }, creds, selectedRegion, config, metadata) + let response = await request({ ...params, service }, creds, selectedRegion, config, metadata) // Run plugin.response() - /* istanbul ignore next */ // TODO remove as soon as plugin.response() API settles if (method.response) { try { - var result = await method.response(response, pluginUtils) - if (result && result.response === undefined) { - throw TypeError('Response plugins must return a response property') - } + var pluginRes = await method.response(response, { ...pluginUtils, region: selectedRegion }) } catch (methodError) { errorHandler({ error: methodError, metadata }) } - response = result?.awsjson - ? 
awsjson.unmarshall(result.response, result.awsjson) - : result?.response || response + if (pluginRes !== undefined) { + let unmarshalling = pluginRes.awsjson + if (unmarshalling) { + delete pluginRes.awsjson + // If a payload property isn't included, it _is_ the payload + let unmarshalled = awsjson.unmarshall(pluginRes.payload || pluginRes, unmarshalling) + response = pluginRes.payload + ? { ...pluginRes, payload: unmarshalled } + : unmarshalled + } + else response = pluginRes + } } return response } @@ -131,7 +168,7 @@ module.exports = async function clientFactory (config, creds, region) { // Run plugin.error() if (method.error && !(input instanceof Error)) { try { - let updatedError = await method.error(err, pluginUtils) + let updatedError = await method.error(err, { ...pluginUtils, region: selectedRegion }) errorHandler(updatedError || err) } catch (methodError) { diff --git a/src/error.js b/src/error.js index 4225654b..a745f48d 100644 --- a/src/error.js +++ b/src/error.js @@ -4,13 +4,16 @@ module.exports = function errorHandler (input) { throw input } - let { error, statusCode, metadata } = input + let { statusCode, headers, error, metadata } = input // If the error passed is an actual Error, it probably came from a plugin method failing, so we should attempt to retain its beautiful, beautiful stack trace let err = error instanceof Error ? error : Error() if (statusCode) { err.statusCode = statusCode } + if (headers) { + err.headers = headers + } // The most common error response from AWS services if (typeof error === 'object') { diff --git a/src/get-creds.js b/src/get-creds.js index c09f4033..e52d0cd6 100644 --- a/src/get-creds.js +++ b/src/get-creds.js @@ -1,10 +1,11 @@ -let { existsSync, readFileSync } = require('fs') +let { readFile } = require('fs/promises') +let { exists } = require('./lib') let { join } = require('path') let os = require('os') let ini = require('ini') // https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html -module.exports = function getCreds (params) { +module.exports = async function getCreds (params) { let paramsCreds = validate(params) if (paramsCreds) return paramsCreds @@ -13,7 +14,7 @@ module.exports = function getCreds (params) { let isInLambda = process.env.AWS_LAMBDA_FUNCTION_NAME if (!isInLambda) { - let credsFileCreds = getCredsFromFile(params) + let credsFileCreds = await getCredsFromFile(params) if (credsFileCreds) return credsFileCreds } @@ -29,14 +30,14 @@ function getCredsFromEnv () { return validate({ accessKeyId, secretAccessKey, sessionToken }) } -function getCredsFromFile (params) { +async function getCredsFromFile (params) { let { AWS_SHARED_CREDENTIALS_FILE, AWS_PROFILE } = process.env let profile = params.profile || AWS_PROFILE || 'default' let home = os.homedir() let credsFile = AWS_SHARED_CREDENTIALS_FILE || join(home, '.aws', 'credentials') - if (existsSync(credsFile)) { - let file = readFileSync(credsFile) + if (await exists(credsFile)) { + let file = await readFile(credsFile) let creds = ini.parse(file.toString()) if (!creds[profile]) { diff --git a/src/get-region.js b/src/get-region.js index 9a1b1bff..7af1f711 100644 --- a/src/get-region.js +++ b/src/get-region.js @@ -1,10 +1,11 @@ -let { existsSync, readFileSync } = require('fs') +let { readFile } = require('fs/promises') +let { exists } = require('./lib') let { join } = require('path') let os = require('os') let ini = require('ini') let regions = require('./regions.json') -module.exports = function getRegion (params) { +module.exports = async function 
getRegion (params) { let { region } = params let paramsRegion = validateRegion(region) @@ -15,7 +16,7 @@ module.exports = function getRegion (params) { let isInLambda = process.env.AWS_LAMBDA_FUNCTION_NAME if (!isInLambda) { - let configRegion = getRegionFromConfig(params) + let configRegion = await getRegionFromConfig(params) if (configRegion) return configRegion } @@ -29,7 +30,7 @@ function getRegionFromEnv () { return validateRegion(region) } -function getRegionFromConfig (params) { +async function getRegionFromConfig (params) { let { AWS_SDK_LOAD_CONFIG, AWS_CONFIG_FILE, AWS_PROFILE } = process.env if (!AWS_SDK_LOAD_CONFIG) return false @@ -38,8 +39,8 @@ function getRegionFromConfig (params) { let home = os.homedir() let configFile = AWS_CONFIG_FILE || join(home, '.aws', 'config') - if (existsSync(configFile)) { - let file = readFileSync(configFile) + if (await exists(configFile)) { + let file = await readFile(configFile) let config = ini.parse(file.toString()) if (!config[profileName]) { diff --git a/src/index.js b/src/index.js index 094cee12..75d9ac2e 100644 --- a/src/index.js +++ b/src/index.js @@ -21,8 +21,8 @@ let clientFactory = require('./client-factory') */ module.exports = async function awsLite (config = {}) { // Creds + region first - let creds = getCreds(config) - let region = getRegion(config) + let creds = await getCreds(config) + let region = await getRegion(config) // Set defaults config.protocol = config.protocol ?? 'https' diff --git a/src/lib.js b/src/lib.js index 846efb99..998a6684 100644 --- a/src/lib.js +++ b/src/lib.js @@ -1,5 +1,7 @@ +let { stat } = require('fs/promises') let { marshall, unmarshall } = require('./_vendor') +// AWS-flavored JSON stuff function marshaller (method, obj, awsjsonSetting) { // We may not be able to AWS JSON-[en|de]code the whole payload, check for specified keys if (Array.isArray(awsjsonSetting)) { @@ -17,6 +19,11 @@ let awsjson = { unmarshall: marshaller.bind({}, unmarshall), } +async function exists (file) { + try { await stat(file); return true } + catch { return false } +} + // Probably this is going to need some refactoring in Arc 11 // Certainly it is not reliable in !Arc local Lambda emulation let nonLocalEnvs = [ 'staging', 'production' ] @@ -30,4 +37,4 @@ function useAWS () { return true } -module.exports = { awsjson, useAWS } +module.exports = { awsjson, exists, useAWS } diff --git a/src/request.js b/src/request.js index b0595b6f..2c465741 100644 --- a/src/request.js +++ b/src/request.js @@ -1,4 +1,5 @@ let qs = require('querystring') +let { Readable } = require('stream') let aws4 = require('aws4') let { globalServices, semiGlobalServices } = require('./services') let { is } = require('./validate') @@ -8,12 +9,15 @@ let JSONregex = /application\/json/ let JSONContentType = ct => ct.match(JSONregex) let AwsJSONregex = /application\/x-amz-json/ let AwsJSONContentType = ct => ct.match(AwsJSONregex) +let XMLregex = /(application|text)\/xml/ +let XMLContentType = ct => ct.match(XMLregex) module.exports = function request (params, creds, region, config, metadata) { return new Promise((resolve, reject) => { // Path - params.path = params.endpoint || '/' + // Note: params.path may be passed if the request is coming from a plugin that pre-signed with aws4 + params.path = params.endpoint || params.path || '/' if (!params.path.startsWith('/')) { params.path = '/' + params.path } @@ -39,8 +43,11 @@ module.exports = function request (params, creds, region, config, metadata) { // Body - JSON-ify payload where convenient! 
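+    // `payload` is the canonical param; `body`, `data`, and `json` are accepted aliases (first match wins)
+    // Buffers and streams must be identified before the object check below so they are not JSON-encoded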
let body = params.payload || params.body || params.data || params.json - // Lots of potentially weird valid json (like just a null), deal with it if / when we need to I guess - if (typeof body === 'object') { + let isBuffer = body instanceof Buffer + let isStream = body instanceof Readable + + // Detecting objects leaves open the possibility of some weird valid JSON (like just a null), deal with it if / when we need to I guess + if (typeof body === 'object' && !isBuffer && !isStream) { // Backfill content-type if it's just an object if (!contentType) contentType = 'application/json' @@ -58,11 +65,20 @@ module.exports = function request (params, creds, region, config, metadata) { // Final JSON encoding params.body = JSON.stringify(body) } - // Everything else just passes through - else params.body = body + // Everything besides streams pass through for signing + else { + params.body = isStream ? undefined : body + } // Finalize headers, content-type - if (contentType) headers['content-type'] = contentType + if (contentType) { + headers['content-type'] = contentType + } + // aws4's default content-type is form-urlencoded: backfill if there's a (non-streaming) body, yet no content-type was specified + // We don't want aws4 to attempt to sign stream objects, so if we backfill this content-type on a stream, the signature breaks and auth will fail + else if (params.body) { + headers['content-type'] = 'application/octet-stream' + } params.headers = headers // Sign the payload; let aws4 handle (most) logic related to region + service instantiation @@ -102,8 +118,11 @@ module.exports = function request (params, creds, region, config, metadata) { /* istanbul ignore next */ if (config.debug) { let { method = 'GET', service, host, path, port = '', headers, protocol, body } = options - let truncatedBody = body?.length > 1000 ? body?.substring(0, 1000) + '...' : body - console.error('[aws-lite] Requesting:', { + let truncatedBody + /**/ if (isBuffer) truncatedBody = `` + else if (isStream) truncatedBody = `` + else truncatedBody = body?.length > 1000 ? body?.substring(0, 1000) + '...' : body + console.error('[aws-lite] Request:', { service, method, url: `${protocol}//${host}${port}${path}`, @@ -119,21 +138,42 @@ module.exports = function request (params, creds, region, config, metadata) { let ok = statusCode >= 200 && statusCode < 303 res.on('data', chunk => data.push(chunk)) res.on('end', () => { - // TODO The following string coersion will definitely need be changed when we get into binary response payloads - let result = Buffer.concat(data).toString() + let body = Buffer.concat(data), payload, rawString let contentType = headers['content-type'] || headers['Content-Type'] || '' if (JSONContentType(contentType) || AwsJSONContentType(contentType)) { - result = JSON.parse(result) - } - // Some services may attempt to respond with regular JSON, but an AWS JSON content-type. Sure. Ok. Anyway, try to guard against that. - if (AwsJSONContentType(contentType)) { - try { - result = awsjson.unmarshall(result) + payload = JSON.parse(body) + + /* istanbul ignore next */ + if (config.debug) rawString = body.toString() + + // Some services may attempt to respond with regular JSON, but an AWS JSON content-type. Sure. Ok. Anyway, try to guard against that. 
+ if (AwsJSONContentType(contentType)) { + try { + payload = awsjson.unmarshall(payload) + } + catch { /* noop, it's already parsed */ } } - catch { /* noop, it's already parsed */ } } - if (ok) resolve(result) - else reject({ error: result, metadata, statusCode }) + if (XMLContentType(contentType)) { + payload = body.toString() + /* istanbul ignore next */ + if (config.debug) rawString = payload + } + payload = payload || (body.length ? body : null) + + /* istanbul ignore next */ + if (config.debug) { + let truncatedBody + /**/ if (payload instanceof Buffer) truncatedBody = body.length ? `` : '' + else if (rawString) truncatedBody = rawString?.length > 1000 ? rawString?.substring(0, 1000) + '...' : rawString + console.error('[aws-lite] Response:', { + statusCode, + headers, + body: truncatedBody || '', + }) + } + if (ok) resolve({ statusCode, headers, payload }) + else reject({ statusCode, headers, error: payload, metadata }) }) }) req.on('error', error => reject({ @@ -147,6 +187,18 @@ module.exports = function request (params, creds, region, config, metadata) { port: options.port, } })) - req.end(options.body || '') + + if (isStream) { + body.pipe(req) + /* istanbul ignore next */ + if (config.debug) { + let bytes = 0 + body.on('data', chunk => { + bytes += chunk.length + console.error(`Bytes streamed: ${bytes}`) + }) + } + } + else req.end(options.body || '') }) } diff --git a/test/lib/index.js b/test/lib/index.js index 490a2e0d..100f2fa2 100644 --- a/test/lib/index.js +++ b/test/lib/index.js @@ -4,6 +4,7 @@ const http = require('http') const accessKeyId = 'foo' const autoloadPlugins = false const badPort = 12345 +const debug = false const host = 'localhost' const keepAlive = false const protocol = 'http' @@ -12,7 +13,7 @@ const secretAccessKey = 'bar' const service = 'lambda' const endpoint = '/an/endpoint' const port = 1111 -const config = { accessKeyId, secretAccessKey, region, protocol, autoloadPlugins, keepAlive, host, port } +const config = { accessKeyId, secretAccessKey, region, debug, protocol, autoloadPlugins, keepAlive, host, port } const defaults = { accessKeyId, autoloadPlugins, badPort, config, host, keepAlive, protocol, region, secretAccessKey, service, endpoint, port } let serverData = {} @@ -29,7 +30,7 @@ let server = { if (data.length) { body = req.headers?.['content-type']?.includes('json') ? 
JSON.parse(data) - : data.join() + : Buffer.concat(data) } serverData.request = { url: req.url, diff --git a/test/live/_iam.mjs b/test/live/_iam.mjs index 2fdd68ae..5cc26c3c 100644 --- a/test/live/_iam.mjs +++ b/test/live/_iam.mjs @@ -12,6 +12,7 @@ export default { } } }, + response: async ({ payload }) => payload, }, CreateRole: { request: async function ({ name, policyDoc, desc, path }) { @@ -26,6 +27,7 @@ export default { } } }, + response: async ({ payload }) => payload, } } } diff --git a/test/live/_lambda.js b/test/live/_lambda.js index 7936567e..aa60cdae 100644 --- a/test/live/_lambda.js +++ b/test/live/_lambda.js @@ -22,6 +22,7 @@ module.exports = { endpoint: `/2015-03-31/functions/${name}/configuration` } }, + response: async ({ payload }) => payload, }, // https://docs.aws.amazon.com/lambda/latest/dg/API_Invoke.html @@ -35,6 +36,7 @@ module.exports = { endpoint: `/2015-03-31/functions/${name}/invocations` } }, + response: async ({ payload }) => payload, }, } } diff --git a/test/mock/plugins/cjs/index.js b/test/mock/plugins/cjs/index.js index a24d5f42..6cae6dc0 100644 --- a/test/mock/plugins/cjs/index.js +++ b/test/mock/plugins/cjs/index.js @@ -1,2 +1,2 @@ // Just a passthrough -module.exports = require('../get') +module.exports = require('../request-get') diff --git a/test/mock/plugins/errors.js b/test/mock/plugins/error.js similarity index 68% rename from test/mock/plugins/errors.js rename to test/mock/plugins/error.js index 855914eb..ec662f78 100644 --- a/test/mock/plugins/errors.js +++ b/test/mock/plugins/error.js @@ -5,15 +5,25 @@ module.exports = { service: 'lambda', methods: { requestMethodBlowsUp: { + awsDoc: 'https://requestMethodBlowsUp.lol', request: async (input) => { input.foo.bar = 'idk' return input } }, + responseMethodBlowsUp: { + awsDoc: 'https://responseMethodBlowsUp.lol', + response: async (input) => { + input.foo.bar = 'idk' + return input + } + }, noErrorMethod: { request: passthrough, }, errorMethodMutatesError: { + awsDoc: 'https://errorMethodMutatesError.lol', + readme: 'lolidk', request: noop, error: async (error) => { if (error.statusCode === 400 && @@ -24,10 +34,12 @@ module.exports = { } }, errorMethodNoop: { + awsDoc: 'https://errorMethodNoop.lol', request: noop, error: noop, }, errorMethodBlowsUp: { + awsDoc: 'https://errorMethodBlowsUp.lol', request: noop, error: async (err) => { err.metadata.type = message diff --git a/test/mock/plugins/invalid/invalid-error-method.js b/test/mock/plugins/invalid/invalid-error-method.js index 28d71589..1e09a660 100644 --- a/test/mock/plugins/invalid/invalid-error-method.js +++ b/test/mock/plugins/invalid/invalid-error-method.js @@ -2,7 +2,6 @@ module.exports = { service: 'lambda', methods: { foo: { - request: async () => {}, error: true, } } diff --git a/test/mock/plugins/invalid/invalid-response-method.js b/test/mock/plugins/invalid/invalid-response-method.js new file mode 100644 index 00000000..f01e7926 --- /dev/null +++ b/test/mock/plugins/invalid/invalid-response-method.js @@ -0,0 +1,8 @@ +module.exports = { + service: 'lambda', + methods: { + foo: { + response: true, + } + } +} diff --git a/test/mock/plugins/misc/disabled-methods.js b/test/mock/plugins/misc/disabled-methods.js new file mode 100644 index 00000000..e493908e --- /dev/null +++ b/test/mock/plugins/misc/disabled-methods.js @@ -0,0 +1,13 @@ +module.exports = { + service: 'lambda', + methods: { + ok: { + request: () => {} + }, + disabledByFalsy: false, + disabledByParam: { + disabled: true, + awsDoc: 'https://arc.codes', + }, + } +} diff --git 
a/test/mock/plugins/get.js b/test/mock/plugins/request-get.js similarity index 100% rename from test/mock/plugins/get.js rename to test/mock/plugins/request-get.js diff --git a/test/mock/plugins/post.js b/test/mock/plugins/request-post.js similarity index 100% rename from test/mock/plugins/post.js rename to test/mock/plugins/request-post.js diff --git a/test/mock/plugins/response.js b/test/mock/plugins/response.js new file mode 100644 index 00000000..287eaf27 --- /dev/null +++ b/test/mock/plugins/response.js @@ -0,0 +1,46 @@ +const required = true +const passthrough = params => params + +module.exports = { + service: 'lambda', + methods: { + NoResponseMethod: { + request: passthrough + }, + Passthrough: { + response: passthrough + }, + MutateProperty: { + response: params => { + params.statusCode = 234 + return params + } + }, + MutateAllProperties: { + response: params => { + params.statusCode = 234 + params.headers.foo = 'bar' + params.payload = { hi: 'there' } + return params + } + }, + OnlyPassThroughPayload: { + response: params => params.payload + }, + ReturnWhatever: { + response: params => 'yooo' + }, + ReturnAwsJsonAll: { + response: params => ({ ...params, awsjson: true }) + }, + ReturnAwsJsonPayload: { + response: params => ({ ...params.payload, awsjson: true }) + }, + ReturnAwsJsonKey: { + response: params => ({ ...params, awsjson: [ 'Item' ] }) + }, + ReturnNothing: { + response: () => {} + }, + } +} diff --git a/test/mock/plugins/validation.js b/test/mock/plugins/validate.js similarity index 86% rename from test/mock/plugins/validation.js rename to test/mock/plugins/validate.js index a97b1bc1..edc1b485 100644 --- a/test/mock/plugins/validation.js +++ b/test/mock/plugins/validate.js @@ -26,5 +26,9 @@ module.exports = { noValidation: { request, }, + pluginBreaksValidation: { + validate: { arr: { type: 'array' } }, + request: async () => ({ arr: 12345 }) + } } } diff --git a/test/unit/src/get-creds-test.js b/test/unit/src/get-creds-test.js index 6061022e..1105f214 100644 --- a/test/unit/src/get-creds-test.js +++ b/test/unit/src/get-creds-test.js @@ -18,36 +18,36 @@ test('Set up env', t => { t.ok(getCreds, 'getCreds module is present') }) -test('Get credentials from passed params', t => { +test('Get credentials from passed params', async t => { t.plan(4) resetAWSEnvVars() let passed, result // Key + secret only passed = { accessKeyId: ok, secretAccessKey: ok } - result = getCreds(passed) + result = await getCreds(passed) t.deepEqual(result, { ...passed, sessionToken: undefined }, 'Returned correct credentials from passed params') // Key + secret + sessionToken passed = { accessKeyId: ok, secretAccessKey: ok, sessionToken: ok } - result = getCreds(passed) + result = await getCreds(passed) t.deepEqual(result, passed, 'Returned correct credentials from passed params') // Prioritize passed params before env or creds file process.env.AWS_ACCESS_KEY_ID = nope process.env.AWS_SECRET_ACCESS_KEY = nope process.env.AWS_SESSION_TOKEN = nope - result = getCreds(passed) + result = await getCreds(passed) t.deepEqual(result, passed, 'Returned correct credentials from passed params') resetAWSEnvVars() process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock - result = getCreds(passed) + result = await getCreds(passed) t.deepEqual(result, passed, 'Returned correct credentials from passed params') resetAWSEnvVars() }) -test('Get credentials from env vars', t => { +test('Get credentials from env vars', async t => { t.plan(3) resetAWSEnvVars() let passed, result @@ -57,23 +57,23 @@ test('Get 
credentials from env vars', t => { // Key + secret only passed = { accessKeyId: ok, secretAccessKey: ok } - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, { ...passed, sessionToken: undefined }, 'Returned correct credentials from env vars') // Key + secret + sessionToken process.env.AWS_SESSION_TOKEN = ok passed = { accessKeyId: ok, secretAccessKey: ok, sessionToken: ok } - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, passed, 'Returned correct credentials from env vars') // Prioritize passed params before creds file process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, passed, 'Returned correct credentials from env vars') resetAWSEnvVars() }) -test('Get credentials from credentials file', t => { +test('Get credentials from credentials file', async t => { t.plan(5) resetAWSEnvVars() let result @@ -94,78 +94,105 @@ test('Get credentials from credentials file', t => { let home = os.homedir() let credsFile = join(home, '.aws', 'credentials') mockFs({ [credsFile]: mockFs.load(credentialsMock) }) - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, defaultProfile, 'Returned correct credentials from credentials file (~/.aws file location)') mockFs.restore() resetAWSEnvVars() // Configured file locations process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, defaultProfile, 'Returned correct credentials from credentials file (default profile)') resetAWSEnvVars() // params.profile process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock - result = getCreds({ profile: 'profile_1' }) + result = await getCreds({ profile: 'profile_1' }) t.deepEqual(result, nonDefaultProfile, 'Returned correct credentials from credentials file (params.profile)') resetAWSEnvVars() // AWS_PROFILE env var process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock process.env.AWS_PROFILE = profile - result = getCreds({}) + result = await getCreds({}) t.deepEqual(result, nonDefaultProfile, 'Returned correct credentials from credentials file (AWS_PROFILE env var)') resetAWSEnvVars() // Credential file checks are skipped in Lambda process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock process.env.AWS_LAMBDA_FUNCTION_NAME = 'true' - t.throws(() => { - getCreds({}) - }, /You must supply AWS credentials via/, 'Did not look for credentials file on disk in Lambda') + try { + await getCreds({}) + } + catch (err) { + t.match(err.message, /You must supply AWS credentials via/, 'Did not look for credentials file on disk in Lambda') + } resetAWSEnvVars() }) -test('Validate credentials', t => { +test('Validate credentials', async t => { t.plan(8) resetAWSEnvVars() - t.throws(() => { - getCreds({ accessKeyId: num }) - }, /Access key must be a string/, 'Threw on invalid access key') + try { + await getCreds({ accessKeyId: num }) + } + catch (err) { + t.match(err.message, /Access key must be a string/, 'Threw on invalid access key') + } - t.throws(() => { - getCreds({ secretAccessKey: num }) - }, /Secret access key must be a string/, 'Threw on invalid secret key') + try { + await getCreds({ secretAccessKey: num }) + } + catch (err) { + t.match(err.message, /Secret access key must be a string/, 'Threw on invalid secret key') + } - t.throws(() => { - getCreds({ sessionToken: num }) - }, /Session token must be a string/, 'Threw on invalid session token') + try { + await getCreds({ sessionToken: num }) + } + catch 
(err) { + t.match(err.message, /Session token must be a string/, 'Threw on invalid session token') + } - t.throws(() => { - getCreds({ accessKeyId: ok }) - }, /You must supply both an access key ID & secret access key/, 'Threw on invalid credentials combo') + try { + await getCreds({ accessKeyId: ok }) + } + catch (err) { + t.match(err.message, /You must supply both an access key ID & secret access key/, 'Threw on invalid credentials combo') + } - t.throws(() => { - getCreds({ secretAccessKey: ok }) - }, /You must supply both an access key ID & secret access key/, 'Threw on invalid credentials combo') + try { + await getCreds({ secretAccessKey: ok }) + } + catch (err) { + t.match(err.message, /You must supply both an access key ID & secret access key/, 'Threw on invalid credentials combo') + } - t.throws(() => { + try { process.env.AWS_SHARED_CREDENTIALS_FILE = 'meh' // jic dev has actual creds file - getCreds({ sessionToken: ok }) - }, /You must supply AWS credentials via/, 'Threw on invalid credentials combo') + await getCreds({ sessionToken: ok }) + } + catch (err) { + t.match(err.message, /You must supply AWS credentials via/, 'Threw on invalid credentials combo') + } - t.throws(() => { + try { process.env.AWS_SHARED_CREDENTIALS_FILE = credentialsMock process.env.AWS_PROFILE = 'idk' - getCreds({}) - }, /Profile not found/, 'Threw on missing profile') + await getCreds({}) + } + catch (err) { + t.match(err.message, /Profile not found/, 'Threw on missing profile') + } resetAWSEnvVars() - t.throws(() => { + try { process.env.AWS_SHARED_CREDENTIALS_FILE = 'meh' // jic dev has actual creds file - getCreds({}) - }, /You must supply AWS credentials via params, environment variables, or credentials file/, 'Threw on no available credentials') + await getCreds({}) + } + catch (err) { + t.match(err.message, /You must supply AWS credentials via params, environment variables, or credentials file/, 'Threw on no available credentials') + } resetAWSEnvVars() }) diff --git a/test/unit/src/get-region-test.js b/test/unit/src/get-region-test.js index ec265ef5..66f3c0eb 100644 --- a/test/unit/src/get-region-test.js +++ b/test/unit/src/get-region-test.js @@ -19,35 +19,35 @@ test('Set up env', t => { t.ok(getRegion, 'getRegion module is present') }) -test('Get region from passed params', t => { +test('Get region from passed params', async t => { t.plan(1) let region = east1 - let result = getRegion({ region }) + let result = await getRegion({ region }) t.equal(result, region, 'Returned correct region from passed params') }) -test('Get region from env vars', t => { +test('Get region from env vars', async t => { t.plan(3) resetAWSEnvVars() let result process.env.AWS_REGION = east1 - result = getRegion({}) + result = await getRegion({}) t.equal(result, east1, 'Returned correct region from env vars') resetAWSEnvVars() process.env.AWS_DEFAULT_REGION = east1 - result = getRegion({}) + result = await getRegion({}) t.equal(result, east1, 'Returned correct region from env vars') resetAWSEnvVars() process.env.AMAZON_REGION = east1 - result = getRegion({}) + result = await getRegion({}) t.equal(result, east1, 'Returned correct region from env vars') resetAWSEnvVars() }) -test('Get region from config file', t => { +test('Get region from config file', async t => { t.plan(5) resetAWSEnvVars() let result @@ -58,7 +58,7 @@ test('Get region from config file', t => { let configFile = join(home, '.aws', 'config') mockFs({ [configFile]: mockFs.load(configMock) }) process.env.AWS_SDK_LOAD_CONFIG = true - result = getRegion({}) + 
result = await getRegion({}) t.equal(result, west1, 'Returned correct region from config file (~/.aws file location)') mockFs.restore() resetAWSEnvVars() @@ -66,14 +66,14 @@ test('Get region from config file', t => { // Configured file locations process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = configMock - result = getRegion({}) + result = await getRegion({}) t.equal(result, west1, 'Returned correct region from config file (default profile)') resetAWSEnvVars() // params.profile process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = configMock - result = getRegion({ profile: 'profile_1' }) + result = await getRegion({ profile: 'profile_1' }) t.equal(result, west2, 'Returned correct region from config file (params.profile)') resetAWSEnvVars() @@ -81,7 +81,7 @@ test('Get region from config file', t => { process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = configMock process.env.AWS_PROFILE = profile - result = getRegion({}) + result = await getRegion({}) t.equal(result, west2, 'Returned correct region from config file (AWS_PROFILE env var)') resetAWSEnvVars() @@ -89,40 +89,58 @@ test('Get region from config file', t => { process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = configMock process.env.AWS_LAMBDA_FUNCTION_NAME = 'true' - t.throws(() => { - getRegion({}) - }, /You must supply AWS region/, 'Did not look for config file on disk in Lambda') + try { + await getRegion({}) + } + catch (err) { + t.match(err.message, /You must supply AWS region/, 'Did not look for config file on disk in Lambda') + } resetAWSEnvVars() }) -test('Validate config', t => { +test('Validate config', async t => { t.plan(5) resetAWSEnvVars() - t.throws(() => { - getRegion({ region: num }) - }, /Region must be a string/, 'Threw on invalid region') - - t.throws(() => { - getRegion({ region: 'us-south-14' }) - }, /Invalid region specified/, 'Threw on invalid region') - - t.throws(() => { + try { + await getRegion({ region: num }) + } + catch (err) { + t.match(err.message, /Region must be a string/, 'Threw on invalid region') + } + + try { + await getRegion({ region: 'us-south-14' }) + } + catch (err) { + t.match(err.message, /Invalid region specified/, 'Threw on invalid region') + } + + try { process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = configMock process.env.AWS_PROFILE = 'idk' - getRegion({}) - }, /Profile not found/, 'Threw on missing profile') + await getRegion({}) + } + catch (err) { + t.match(err.message, /Profile not found/, 'Threw on missing profile') + } resetAWSEnvVars() - t.throws(() => { + try { process.env.AWS_SDK_LOAD_CONFIG = true process.env.AWS_CONFIG_FILE = 'meh' - getRegion({}) - }, /You must supply AWS region/, 'Threw on no available config (after attempting to checking filesystem)') + await getRegion({}) + } + catch (err) { + t.match(err.message, /You must supply AWS region/, 'Threw on no available config (after attempting to checking filesystem)') + } resetAWSEnvVars() - t.throws(() => { - getRegion({}) - }, /You must supply AWS region/, 'Threw on no available config') + try { + await getRegion({}) + } + catch (err) { + t.match(err.message, /You must supply AWS region/, 'Threw on no available config') + } }) diff --git a/test/unit/src/index-client-test.js b/test/unit/src/index-client-test.js index f772ecf3..5ba68287 100644 --- a/test/unit/src/index-client-test.js +++ b/test/unit/src/index-client-test.js @@ -1,4 +1,5 @@ let { join } = require('path') +let { Readable } = require('stream') let qs = 
require('querystring') let test = require('tape') let { basicRequestChecks, defaults, resetServer: reset, server } = require('../../lib') @@ -16,8 +17,8 @@ test('Set up env', async t => { }) test('Primary client - core functionality', async t => { - t.plan(28) - let request, result, body, query, responseBody, url + t.plan(48) + let request, result, payload, query, responseBody, url let headers = { 'content-type': 'application/json' } @@ -27,7 +28,9 @@ test('Primary client - core functionality', async t => { result = await aws({ service, endpoint }) request = server.getCurrentRequest() t.notOk(request.body, 'Request included no body') - t.equal(result, '', 'Client returned empty response body as empty string') + t.equal(result.statusCode, 200, 'Client returned status code of response') + t.ok(result.headers, 'Client returned response headers') + t.equal(result.payload, null, 'Client returned empty response payload as null') basicRequestChecks(t, 'GET') // Basic get request with query string params @@ -37,50 +40,74 @@ test('Primary client - core functionality', async t => { basicRequestChecks(t, 'GET', { url }) // Basic post request - body = { ok: true } + payload = { ok: true } responseBody = { aws: 'lol' } server.use({ responseBody, responseHeaders: headers }) - result = await aws({ service, endpoint, body }) + result = await aws({ service, endpoint, payload }) request = server.getCurrentRequest() - t.deepEqual(request.body, body, 'Request included correct body') - t.deepEqual(result, responseBody, 'Client returned response body as parsed JSON') + t.deepEqual(request.body, payload, 'Request included correct body') + t.deepEqual(result.payload, responseBody, 'Client returned response payload as parsed JSON') basicRequestChecks(t, 'POST') // Basic post with query string params - body = { ok: true } + payload = { ok: true } query = { fiz: 'buz', json: JSON.stringify({ ok: false }) } url = endpoint + '?' 
+ qs.stringify(query) responseBody = { aws: 'lol' } server.use({ responseBody, responseHeaders: headers }) - result = await aws({ service, endpoint, body, query }) + result = await aws({ service, endpoint, payload, query }) basicRequestChecks(t, 'POST', { url }) // Publish an object while passing headers - body = { ok: true } - result = await aws({ service, endpoint, body, headers }) + payload = { ok: true } + result = await aws({ service, endpoint, payload, headers }) request = server.getCurrentRequest() - t.deepEqual(request.body, body, 'Request included correct body (pre-encoded JSON)') + t.deepEqual(request.body, payload, 'Request included correct body (pre-encoded JSON)') reset() // Publish JSON while passing headers - body = JSON.stringify({ ok: true }) - result = await aws({ service, endpoint, body, headers }) + payload = JSON.stringify({ ok: true }) + result = await aws({ service, endpoint, payload, headers }) request = server.getCurrentRequest() - t.deepEqual(request.body, JSON.parse(body), 'Request included correct body (pre-encoded JSON)') + t.deepEqual(request.body, JSON.parse(payload), 'Request included correct body (pre-encoded JSON)') reset() // Publish some other kind of non-JSON request - body = 'hi' - result = await aws({ service, endpoint, body }) + payload = 'hi' + result = await aws({ service, endpoint, payload }) request = server.getCurrentRequest() - t.deepEqual(request.body, body, 'Request included correct body (just a string)') + t.deepEqual(request.body.toString(), payload, 'Request included correct body (just a string)') reset() + // Publish a stream + payload = new Readable() + let text = 'hi\nhello\nyo' + text.split('').forEach(c => payload.push(c)) + payload.push(null) + await aws({ service, endpoint, payload, method: 'POST', headers: { 'content-length': text.length } }) + request = server.getCurrentRequest() + t.equal(request.body.toString(), text, 'Request included correct body') + basicRequestChecks(t, 'POST') + // Ensure endpoints without leading slashes are handled properly result = await aws({ service, endpoint: 'an/endpoint' }) request = server.getCurrentRequest() t.deepEqual(request.url, endpoint, 'Request included correct body (just a string)') reset() + + // Endpoint returns XML + responseBody = 'yo' + server.use({ responseBody, responseHeaders: { 'content-type': 'application/xml' } }) + result = await aws({ service, endpoint }) + t.deepEqual(result.payload, responseBody, 'Client returned response payload as XML string') + basicRequestChecks(t, 'GET') + + // Endpoint returns a buffer + responseBody = Buffer.from('ohi') + server.use({ responseBody, responseHeaders: { 'content-type': 'application/octet-stream' } }) + result = await aws({ service, endpoint }) + t.deepEqual(result.payload, responseBody, 'Client returned response payload as buffer') + basicRequestChecks(t, 'GET') }) test('Primary client - aliased params', async t => { @@ -126,7 +153,7 @@ test('Primary client - aliased params', async t => { let string = 'hi' await aws({ service, endpoint, payload: string }) request = server.getCurrentRequest() - t.equal(request.body, string, 'Made request with correct body (plain string)') + t.equal(request.body.toString(), string, 'Made request with correct body (plain string)') reset() }) @@ -147,7 +174,7 @@ test('Primary client - AWS JSON payloads', async t => { result = await aws({ service, endpoint, body, headers: headersAwsJSON() }) request = server.getCurrentRequest() t.deepEqual(request.body, { ok: { BOOL: true } }, 'Request included correct body 
(raw AWS JSON)') - t.deepEqual(result, expectedResponseBody(), 'Client returned response body as parsed, unmarshalled JSON') + t.deepEqual(result.payload, expectedResponseBody(), 'Client returned response payload as parsed, unmarshalled JSON') basicRequestChecks(t, 'POST') reset() @@ -157,7 +184,7 @@ test('Primary client - AWS JSON payloads', async t => { result = await aws({ service, endpoint, body, headers: headersAwsJSON() }) request = server.getCurrentRequest() t.deepEqual(request.body, { ok: { BOOL: false } }, 'Request included correct body (raw AWS JSON)') - t.deepEqual(result, expectedResponseBody(), 'Client returned response body as parsed, unmarshalled JSON') + t.deepEqual(result.payload, expectedResponseBody(), 'Client returned response payload as parsed, unmarshalled JSON') basicRequestChecks(t, 'POST') reset() @@ -167,7 +194,7 @@ test('Primary client - AWS JSON payloads', async t => { result = await aws({ service, endpoint, body, awsjson: true }) request = server.getCurrentRequest() t.deepEqual(request.body, { ok: { BOOL: false } }, 'Request included correct body (raw AWS JSON)') - t.deepEqual(result, expectedResponseBody(), 'Client returned response body as parsed, unmarshalled JSON') + t.deepEqual(result.payload, expectedResponseBody(), 'Client returned response payload as parsed, unmarshalled JSON') basicRequestChecks(t, 'POST') reset() @@ -177,7 +204,7 @@ test('Primary client - AWS JSON payloads', async t => { result = await aws({ service, endpoint, body, awsjson: [ 'fine' ] }) request = server.getCurrentRequest() t.deepEqual(request.body, { ok: true, fine: { BOOL: false } }, 'Request included correct body (raw AWS JSON)') - t.deepEqual(result, expectedResponseBody(), 'Client returned response body as parsed, unmarshalled JSON') + t.deepEqual(result.payload, expectedResponseBody(), 'Client returned response payload as parsed, unmarshalled JSON') basicRequestChecks(t, 'POST') reset() @@ -186,12 +213,12 @@ test('Primary client - AWS JSON payloads', async t => { server.use({ responseBody: regularJSON, responseHeaders: headersAwsJSON() }) result = await aws({ service, endpoint }) request = server.getCurrentRequest() - t.deepEqual(result, regularJSON, 'Client returned response body as parsed, unmarshalled JSON') + t.deepEqual(result.payload, regularJSON, 'Client returned response payload as parsed, unmarshalled JSON') reset() }) test('Primary client - error handling', async t => { - t.plan(17) + t.plan(19) let responseStatusCode, responseBody, responseHeaders // Normal error @@ -207,7 +234,8 @@ test('Primary client - error handling', async t => { console.log(err) t.match(err.message, /\@aws-lite\/client: lambda: lolno/, 'Error included basic information') t.equal(err.other, responseBody.other, 'Error has other metadata') - t.equal(err.statusCode, responseStatusCode, 'Error has status code') + t.equal(err.statusCode, responseStatusCode, 'Error has response status code') + t.ok(err.headers, 'Error has response headers') t.equal(err.service, service, 'Error has service') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') reset() @@ -227,7 +255,8 @@ test('Primary client - error handling', async t => { console.log(err) t.match(err.message, /\@aws-lite\/client: lambda/, 'Error included basic information') t.ok(err.message.includes(responseBody), 'Error has message') - t.equal(err.statusCode, responseStatusCode, 'Error has status code') + t.equal(err.statusCode, responseStatusCode, 'Error has response status code') + t.ok(err.headers, 'Error has response headers') 
t.equal(err.service, service, 'Error has service') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') reset() diff --git a/test/unit/src/index-config-test.js b/test/unit/src/index-config-test.js index 47b53630..9ce7655b 100644 --- a/test/unit/src/index-config-test.js +++ b/test/unit/src/index-config-test.js @@ -1,4 +1,5 @@ let { join } = require('path') +let mockFs = require('mock-fs') let test = require('tape') let { basicRequestChecks, defaults, resetAWSEnvVars: reset, server } = require('../../lib') let cwd = process.cwd() @@ -12,7 +13,7 @@ test('Set up env', async t => { t.ok(client, 'aws-lite client is present') }) -test('Initial configuration', async t => { +test('Configuration - basic config', async t => { t.plan(3) let aws @@ -36,7 +37,39 @@ test('Initial configuration', async t => { } }) -test('Initial configuration - per-request overrides', async t => { +test('Configuration - plugin loading', async t => { + t.plan(3) + let aws + + // Node.js 14.x + npm 6 does funky things with npm link-ed (symlinked) modules + // That's cool, we can confidently skip this test for now, the related code path provably works! + if (!process.versions.node.startsWith('14')) { + t.plan(4) + aws = await client({ accessKeyId, secretAccessKey, region }) + t.ok(aws.dynamodb, 'Client auto-loaded @aws-lite/dynamodb') + } + + aws = await client({ accessKeyId, secretAccessKey, region, autoloadPlugins: false }) + t.notOk(aws.dynamodb, 'Client did not auto-load @aws-lite/dynamodb') + + let nodeModules = join(cwd, 'node_modules') + mockFs({ [nodeModules]: {} }) + aws = await client({ accessKeyId, secretAccessKey, region }) + t.notOk(aws.dynamodb, `Client did not auto-load @aws-lite/* plugins it can't find`) + mockFs.restore() + + // A bit of a funky test, but we don't need to exercise actually loading an aws-lite-plugin-* plugin, we just need to ensure it attempts to + try { + mockFs({ [join(nodeModules, 'aws-lite-plugin-hi')]: {} }) + aws = await client({ accessKeyId, secretAccessKey, region }) + } + catch (err) { + t.match(err.message, /Cannot find module 'aws-lite-plugin-hi'/, 'Found and loaded aws-lite-plugin-*') + } + mockFs.restore() +}) + +test('Configuration - per-request overrides', async t => { t.plan(7) let started = await server.start() t.ok(started, 'Started server') @@ -67,7 +100,7 @@ test('Initial configuration - per-request overrides', async t => { t.pass('Server ended') }) -test('Initial configuration - validation', async t => { +test('Configuration - validation', async t => { t.plan(2) try { await client({ ...config, protocol: 'lolidk' }) diff --git a/test/unit/src/index-plugins-test.js b/test/unit/src/index-plugins-test.js index 075c7926..344d7371 100644 --- a/test/unit/src/index-plugins-test.js +++ b/test/unit/src/index-plugins-test.js @@ -17,61 +17,12 @@ test('Set up env', async t => { t.ok(started, 'Started server') }) -test('Plugins - method construction, requests', async t => { - t.plan(29) - let name = 'my-lambda' - let aws, expectedEndpoint, request - - // Reads - aws = await client({ ...config, plugins: [ join(pluginDir, 'get.js') ] }) - expectedEndpoint = `/2015-03-31/functions/${name}/configuration` - - await aws.lambda.GetFunctionConfiguration({ name, host, port }) - request = server.getCurrentRequest() - t.equal(request.url, expectedEndpoint, 'Plugin requested generated endpoint') - t.equal(request.body, undefined, 'Plugin made request without body') - basicRequestChecks(t, 'GET', { url: expectedEndpoint }) - - await aws.lambda.GetFunctionConfiguration({ name, 
host, port, endpoint: '/foo' }) - request = server.getCurrentRequest() - t.equal(request.url, expectedEndpoint, 'Plugin can override normal client param') - basicRequestChecks(t, 'GET', { url: expectedEndpoint }) - - // Writes - aws = await client({ ...config, plugins: [ join(pluginDir, 'post.js') ] }) - expectedEndpoint = `/2015-03-31/functions/${name}/invocations` - let payload = { ok: true } - - await aws.lambda.Invoke({ name, payload, host, port }) - request = server.getCurrentRequest() - t.equal(request.url, expectedEndpoint, 'Plugin requested generated endpoint') - t.deepEqual(request.body, payload, 'Plugin made request with included payload') - basicRequestChecks(t, 'POST', { url: expectedEndpoint }) - - await aws.lambda.Invoke({ name, data: payload, host, port }) - request = server.getCurrentRequest() - t.deepEqual(request.body, payload, `Payload can be aliased to 'data'`) - - await aws.lambda.Invoke({ name, body: payload, host, port }) - request = server.getCurrentRequest() - t.deepEqual(request.body, payload, `Payload can be aliased to 'body'`) - - await aws.lambda.Invoke({ name, json: payload, host, port }) - request = server.getCurrentRequest() - t.deepEqual(request.body, payload, `Payload can be aliased to 'json'`) - - await aws.lambda.Invoke({ name, payload, host, port, endpoint: '/foo' }) - request = server.getCurrentRequest() - t.equal(request.url, expectedEndpoint, 'Plugin can override normal client param') - basicRequestChecks(t, 'POST', { url: expectedEndpoint }) -}) - -test('Plugins - input validation', async t => { - t.plan(23) +test('Plugins - validate input', async t => { + t.plan(24) let str = 'hi' let num = 123 - let aws = await client({ ...config, plugins: [ join(pluginDir, 'validation.js') ] }) + let aws = await client({ ...config, plugins: [ join(pluginDir, 'validate.js') ] }) // No validation try { @@ -88,6 +39,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({}) } catch (err) { + console.log(err) t.match(err.message, /Missing required parameter: required/, 'Errored on missing required param') } @@ -96,6 +48,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: num }) } catch (err) { + console.log(err) t.match(err.message, /Parameter 'required' must be: string/, 'Errored on wrong required param type') } @@ -104,6 +57,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ arr: true }) } catch (err) { + console.log(err) t.match(err.message, /Parameter 'arr' must be: array/, 'Errored on wrong optional param type') } @@ -112,6 +66,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ disabled: str }) } catch (err) { + console.log(err) t.match(err.message, /Parameter 'disabled' must not be used/, 'Errored on disabled param') } @@ -120,6 +75,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, invalidType: str }) } catch (err) { + console.log(err) t.match(err.message, /Invalid type found: invalidType \(lolidk\)/, 'Errored on invalid validation type (string)') } @@ -128,6 +84,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, invalidTypeList: str }) } catch (err) { + console.log(err) t.match(err.message, /Invalid type found: invalidTypeList \(listidk\)/, 'Errored on invalid validation type (list)') } @@ -136,6 +93,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, invalidTypeType: str }) } catch (err) { + 
console.log(err) t.match(err.message, /Validator 'type' property must be a string or array/, 'Errored on invalid validation type') } @@ -144,6 +102,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, invalidTypeListType: str }) } catch (err) { + console.log(err) t.match(err.message, /Invalid type found: invalidTypeListType \(12345\)/, 'Errored on invalid validation type (list)') } @@ -152,6 +111,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, missingType: str }) } catch (err) { + console.log(err) t.match(err.message, /Validator is missing required 'type' property/, 'Errored on missing validation type') } @@ -175,16 +135,27 @@ test('Plugins - input validation', async t => { t.fail(`Incorrect ${k} validation failed`) } catch (err) { + console.log(err) let re = new RegExp(`Parameter '${k}' must be`) t.match(err.message, re, `Incorrect ${k} validation succeeded`) } } + // Initial validation passes, but request() output does not pass validation + try { + await aws.lambda.pluginBreaksValidation({ required: str, arr: [] }) + } + catch (err) { + console.log(err) + t.match(err.message, /Parameter 'arr' must be: array/, 'Errored on wrong param (from type array)') + } + // Type array try { await aws.lambda.testTypes({ required: str, payload: num }) } catch (err) { + console.log(err) t.match(err.message, /Parameter 'payload' must be one of/, 'Errored on wrong param (from type array)') } @@ -193,6 +164,7 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, data: num }) } catch (err) { + console.log(err) t.match(err.message, /Parameter 'data' must be one of/, 'Errored on wrong param (from type array, payload alias)') } @@ -201,18 +173,152 @@ test('Plugins - input validation', async t => { await aws.lambda.testTypes({ required: str, payload: str, data: str }) } catch (err) { + console.log(err) t.match(err.message, /Found duplicate payload parameters/, 'Errored on duplicate payload params') } reset() }) -test('Plugins - error handling', async t => { - t.plan(36) +test('Plugins - method construction, request()', async t => { + t.plan(29) + let name = 'my-lambda' + let aws, expectedEndpoint, request + + // Reads + aws = await client({ ...config, plugins: [ join(pluginDir, 'request-get.js') ] }) + expectedEndpoint = `/2015-03-31/functions/${name}/configuration` + + await aws.lambda.GetFunctionConfiguration({ name, host, port }) + request = server.getCurrentRequest() + t.equal(request.url, expectedEndpoint, 'Plugin requested generated endpoint') + t.equal(request.body, undefined, 'Plugin made request without body') + basicRequestChecks(t, 'GET', { url: expectedEndpoint }) + + await aws.lambda.GetFunctionConfiguration({ name, host, port, endpoint: '/foo' }) + request = server.getCurrentRequest() + t.equal(request.url, expectedEndpoint, 'Plugin can override normal client param') + basicRequestChecks(t, 'GET', { url: expectedEndpoint }) + + // Writes + aws = await client({ ...config, plugins: [ join(pluginDir, 'request-post.js') ] }) + expectedEndpoint = `/2015-03-31/functions/${name}/invocations` + let payload = { ok: true } + + await aws.lambda.Invoke({ name, payload, host, port }) + request = server.getCurrentRequest() + t.equal(request.url, expectedEndpoint, 'Plugin requested generated endpoint') + t.deepEqual(request.body, payload, 'Plugin made request with included payload') + basicRequestChecks(t, 'POST', { url: expectedEndpoint }) + + await aws.lambda.Invoke({ name, 
data: payload, host, port })
+ request = server.getCurrentRequest()
+ t.deepEqual(request.body, payload, `Payload can be aliased to 'data'`)
+
+ await aws.lambda.Invoke({ name, body: payload, host, port })
+ request = server.getCurrentRequest()
+ t.deepEqual(request.body, payload, `Payload can be aliased to 'body'`)
+
+ await aws.lambda.Invoke({ name, json: payload, host, port })
+ request = server.getCurrentRequest()
+ t.deepEqual(request.body, payload, `Payload can be aliased to 'json'`)
+
+ await aws.lambda.Invoke({ name, payload, host, port, endpoint: '/foo' })
+ request = server.getCurrentRequest()
+ t.equal(request.url, expectedEndpoint, 'Plugin can override normal client param')
+ basicRequestChecks(t, 'POST', { url: expectedEndpoint })
+})
+
+test('Plugins - response()', async t => {
+ t.plan(77)
+ let aws, payload, response, responseBody, responseHeaders
+
+ aws = await client({ ...config, host, port, plugins: [ join(pluginDir, 'response.js') ] })
+
+ // Pass through by having no response() method
+ response = await aws.lambda.NoResponseMethod()
+ t.equal(response.statusCode, 200, 'Response status code passed through')
+ t.ok(response.headers, 'Response headers passed through')
+ t.notOk(response.headers.foo, 'Response headers not mutated')
+ t.equal(response.payload, null, 'Response payload passed through')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ // Actively pass through
+ response = await aws.lambda.Passthrough()
+ t.equal(response.statusCode, 200, 'Response status code passed through')
+ t.ok(response.headers, 'Response headers passed through')
+ t.notOk(response.headers.foo, 'Response headers not mutated')
+ t.equal(response.payload, null, 'Response payload passed through')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ // Mutate a single property, but pass the rest through
+ response = await aws.lambda.MutateProperty()
+ t.equal(response.statusCode, 234, 'Response status code mutated by plugin')
+ t.ok(response.headers, 'Response headers passed through')
+ t.notOk(response.headers.foo, 'Response headers not mutated')
+ t.equal(response.payload, null, 'Response payload passed through')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ // Mutate all properties
+ response = await aws.lambda.MutateAllProperties()
+ t.equal(response.statusCode, 234, 'Response status code mutated by plugin')
+ t.equal(response.headers.foo, 'bar', 'Response headers mutated')
+ t.equal(response.payload.hi, 'there', 'Response payload mutated')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ responseHeaders = { 'content-type': 'application/json' }
+ payload = { hi: 'there' }
+ server.use({ responseHeaders, responseBody: payload })
+ response = await aws.lambda.OnlyPassThroughPayload()
+ t.deepEqual(response, payload, 'Response passed through just the payload')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ response = await aws.lambda.ReturnWhatever()
+ t.deepEqual(response, 'yooo', 'Response passed through whatever it wants')
+ basicRequestChecks(t, 'GET', { url: '/' })
+
+ // A bit contrived, since AWS JSON would normally be returned with an appropriate header, but we are just making sure we can force the entire payload to be unmarshalled
+ responseHeaders = { 'content-type': 'application/json' }
+ responseBody = { aws: { S: 'idk' } }
+ server.use({ responseHeaders, responseBody })
+ response = await aws.lambda.ReturnAwsJsonAll()
+ t.deepEqual(response.payload, { aws: 'idk' }, 'Returned response payload as parsed, unmarshalled JSON')
+ t.notOk(response.awsjson, 'awsjson property stripped')
+ basicRequestChecks(t, 'GET', { url: '/' }) + + // Unmarshall just the payload contents, leaving out headers and status code + responseHeaders = { 'content-type': 'application/json' } + responseBody = { aws: { S: 'idk' } } + server.use({ responseHeaders, responseBody }) + response = await aws.lambda.ReturnAwsJsonPayload() + t.deepEqual(response, { aws: 'idk' }, 'Returned response payload as parsed, unmarshalled JSON') + t.notOk(response.awsjson, 'awsjson property stripped') + basicRequestChecks(t, 'GET', { url: '/' }) + + // Unmarshall an individual payload key + responseHeaders = { 'content-type': 'application/x-amz-json-1.0' } + responseBody = { Item: { aws: { S: 'idk' } }, ok: true } + server.use({ responseHeaders, responseBody }) + response = await aws.lambda.ReturnAwsJsonKey() + t.deepEqual(response.payload, { Item: { aws: 'idk' }, ok: true }, 'Returned response payload as parsed, unmarshalled JSON') + t.notOk(response.awsjson, 'awsjson property stripped') + basicRequestChecks(t, 'GET', { url: '/' }) + + // Response returns nothing + response = await aws.lambda.ReturnNothing() + t.equal(response.statusCode, 200, 'Response status code passed through') + t.ok(response.headers, 'Response headers passed through') + t.notOk(response.headers.foo, 'Response headers not mutated') + t.equal(response.payload, null, 'Response payload passed through') + basicRequestChecks(t, 'GET', { url: '/' }) +}) + +test('Plugins - error(), error handling', async t => { + t.plan(48) let name = 'my-lambda' let payload = { ok: true } let responseBody, responseHeaders, responseStatusCode - let errorsPlugin = join(pluginDir, 'errors.js') + let errorsPlugin = join(pluginDir, 'error.js') let aws = await client({ ...config, plugins: [ errorsPlugin ] }) // Control @@ -235,6 +341,21 @@ test('Plugins - error handling', async t => { console.log(err) t.match(err.message, /\@aws-lite\/client: lambda.requestMethodBlowsUp: Cannot set/, 'Error included basic method information') t.equal(err.service, service, 'Error has service metadata') + t.equal(err.awsDoc, 'https://requestMethodBlowsUp.lol', 'Error has AWS API doc') + t.ok(err.stack.includes(errorsPlugin), 'Stack trace includes failing plugin') + t.ok(err.stack.includes(__filename), 'Stack trace includes this test') + reset() + } + + // Response method fails + try { + await aws.lambda.responseMethodBlowsUp({ name, host, port }) + } + catch (err) { + console.log(err) + t.match(err.message, /\@aws-lite\/client: lambda.responseMethodBlowsUp: Cannot set/, 'Error included basic method information') + t.equal(err.service, service, 'Error has service metadata') + t.equal(err.awsDoc, 'https://responseMethodBlowsUp.lol', 'Error has AWS API doc') t.ok(err.stack.includes(errorsPlugin), 'Stack trace includes failing plugin') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') reset() @@ -253,6 +374,8 @@ test('Plugins - error handling', async t => { t.match(err.message, /\@aws-lite\/client: lambda.errorMethodMutatesError/, 'Error included basic method information') t.equal(err.statusCode, responseStatusCode, 'Error has status code') t.equal(err.service, service, 'Error has service metadata') + t.equal(err.awsDoc, 'https://errorMethodMutatesError.lol', 'Error has AWS API doc') + t.equal(err.readme, 'lolidk', 'Error has custom readme doc') t.equal(err.other, responseBody.other, 'Error has other metadata') t.notOk(err.type, 'Error does not have type (via plugin error)') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') @@ -272,6 +395,8 @@ 
test('Plugins - error handling', async t => { t.match(err.message, /\@aws-lite\/client: lambda.errorMethodMutatesError/, 'Error included basic method information') t.equal(err.statusCode, responseStatusCode, 'Error has status code') t.equal(err.service, service, 'Error has service metadata') + t.equal(err.awsDoc, 'https://errorMethodMutatesError.lol', 'Error has AWS API doc') + t.equal(err.readme, 'lolidk', 'Error has custom readme doc') t.equal(err.other, responseBody.other, 'Error has other metadata') t.equal(err.type, 'Lambda validation error', 'Error has type (via plugin error)') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') @@ -307,6 +432,7 @@ test('Plugins - error handling', async t => { t.match(err.message, /\@aws-lite\/client: lambda.errorMethodNoop/, 'Error included basic method information') t.equal(err.statusCode, responseStatusCode, 'Error has status code') t.equal(err.service, service, 'Error has service metadata') + t.equal(err.awsDoc, 'https://errorMethodNoop.lol', 'Error has AWS API doc') t.equal(err.other, responseBody.other, 'Error has other metadata') t.notOk(err.type, 'Error does not have type (via plugin error)') t.ok(err.stack.includes(__filename), 'Stack trace includes this test') @@ -325,6 +451,7 @@ test('Plugins - error handling', async t => { console.log(err) t.match(err.message, /\@aws-lite\/client: lambda.errorMethodBlowsUp: Cannot set/, 'Error included basic method information') t.equal(err.service, service, 'Error has service metadata') + t.notOk(err.awsDoc, 'Error does not have a doc') t.notOk(err.other, 'Error does not have other metadata') t.notOk(err.type, 'Error metadata was not mutated') t.ok(err.stack.includes(errorsPlugin), 'Stack trace includes failing plugin') @@ -333,8 +460,31 @@ test('Plugins - error handling', async t => { } }) +test('Plugins - error docs (@aws-lite)', async t => { + t.plan(2) + let aws = await client({ ...config, plugins: [ '@aws-lite/s3' ] }) + + try { + await aws.s3.PutObject() + reset() + } + catch (err) { + console.log(err) + t.equal(err.awsDoc, 'https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html', 'Error has a doc') + t.equal(err.readme, 'https://github.com/architect/aws-lite/blob/main/plugins/s3/readme.md#PutObject', 'Error has link to method in readme') + } +}) + +test('Plugins - disabled methods', async t => { + t.plan(3) + let aws = await client({ ...config, plugins: [ join(pluginDir, 'misc', 'disabled-methods') ] }) + t.ok(aws.lambda.ok, 'Client loaded plugin containing disabled methods') + t.notOk(aws.lambda.disabledByFalsy, 'Client did not load method disabled by boolean false') + t.notOk(aws.lambda.disabledByParam, `Client did not load method disabled by 'disabled' param`) +}) + test('Plugins - plugin validation', async t => { - t.plan(11) + t.plan(12) // CJS try { @@ -371,6 +521,14 @@ test('Plugins - plugin validation', async t => { reset() } + try { + await client({ ...config, plugins: [ join(invalidPlugins, 'invalid-response-method.js') ] }) + } + catch (err) { + t.match(err.message, /All plugin response methods must be a function/, 'Throw on invalid response method') + reset() + } + try { await client({ ...config, plugins: [ join(invalidPlugins, 'invalid-error-method.js') ] }) }