helper_AWS-CLI.md

1. aws cli v2

1.1. Install

$ curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
$ unzip awscliv2.zip
$ sudo ./aws/install

1.2. Environment Variables

$ export AWS_ACCESS_KEY_ID=<access_key>
$ export AWS_SECRET_ACCESS_KEY=<secret_key>
$ export AWS_DEFAULT_REGION=us-west-2
$ aws configure
AWS Access Key ID [None]: 
AWS Secret Access Key [None]: 
Default region name [None]: ap-northeast-1
Default output format [None]: json
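
To confirm which credentials and region the CLI actually resolved (environment variables take precedence over the shared credentials and config files), run:

$ aws configure list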

A. AWS_SHARED_CREDENTIALS_FILE - ~/.aws/credentials

$ echo $AWS_SHARED_CREDENTIALS_FILE

$ cat ~/.aws/credentials
[default]
aws_access_key_id = 
aws_secret_access_key =

B. AWS_CONFIG_FILE - ~/.aws/config

$ echo $AWS_CONFIG_FILE

$ cat ~/.aws/config
[default]
region = ap-northeast-1
output = json
cli_binary_format=raw-in-base64-out
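
The config file can also hold named profiles; a minimal sketch, where the profile name dev and its region are only examples:

[profile dev]
region = us-west-2
output = json

$ aws s3 ls --profile dev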

C. Others

AWS_ROLE_ARN
AWS_WEB_IDENTITY_TOKEN_FILE
AWS_ROLE_SESSION_NAME
These three are used together to assume an IAM role with a web identity token (for example in EKS or CI jobs).

AWS_CONTAINER_CREDENTIALS_RELATIVE_URI
Points the CLI at the container credential endpoint (for example in ECS tasks).

AWS_EC2_METADATA_DISABLED
When set to true, the CLI will not query the EC2 instance metadata service for credentials.
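
For example, a job that receives a web-identity token might set all three together; the role ARN and token path below are placeholders:

$ export AWS_ROLE_ARN=arn:aws:iam::123456789012:role/cli-demo-role
$ export AWS_WEB_IDENTITY_TOKEN_FILE=/var/run/secrets/token
$ export AWS_ROLE_SESSION_NAME=cli-demo-session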

1.3. aws Usage

$ aws --version
aws-cli/2.4.13 Python/3.8.8 Linux/5.15.0-52-generic exe/x86_64.ubuntu.20 prompt/off

$ aws

usage: aws [options] <command> <subcommand> [<subcommand> ...] [parameters]
To see help text, you can run:

  aws help
  aws <command> help
  aws <command> <subcommand> help

aws: error: the following arguments are required: command

$ aws help
$ aws <command> help
$ aws <command> <subcommand> help

aws sts get-caller-identity returns the account ID, user ID, and ARN of the credentials currently in use, which is a quick way to verify that the configuration works.

$ aws sts get-caller-identity

2. AWS Services

flowchart TD
	subgraph Amazon
		DynamoDB
	end

	subgraph ubuntu
  subgraph awsCli[aws cli]
		awsCliDynamodb[aws dynamodb xxx]
	end
	end
	DynamoDB <--> awsCliDynamodb

	classDef yellow fill:#FFFFCC
	classDef pink fill:#FFCCCC
	classDef blue fill:#0000FF
	classDef lightblue fill:#ADD8E6

	class DynamoDB pink
	class awsCliDynamodb pink
$ aws dynamodb create-table \
    --table-name Music \
    --attribute-definitions \
        AttributeName=Artist,AttributeType=S \
        AttributeName=SongTitle,AttributeType=S \
    --key-schema \
        AttributeName=Artist,KeyType=HASH \
        AttributeName=SongTitle,KeyType=RANGE \
    --provisioned-throughput \
        ReadCapacityUnits=5,WriteCapacityUnits=5 \
    --table-class STANDARD
{
    "TableDescription": {
        "AttributeDefinitions": [
            {
                "AttributeName": "Artist",
                "AttributeType": "S"
            },
            {
                "AttributeName": "SongTitle",
                "AttributeType": "S"
            }
        ],
        "TableName": "Music",
        "KeySchema": [
            {
                "AttributeName": "Artist",
                "KeyType": "HASH"
            },
            {
                "AttributeName": "SongTitle",
                "KeyType": "RANGE"
            }
        ],
        "TableStatus": "CREATING",
        "CreationDateTime": "2022-04-13T13:19:42.957000+08:00",
        "ProvisionedThroughput": {
            "NumberOfDecreasesToday": 0,
            "ReadCapacityUnits": 5,
            "WriteCapacityUnits": 5
        },
        "TableSizeBytes": 0,
        "ItemCount": 0,
        "TableArn": "arn:aws:dynamodb:ap-northeast-1:123456789012:table/Music",
        "TableId": "25e42645-c399-4c68-aac6-f7b7084bd23e",
        "TableClassSummary": {
            "TableClass": "STANDARD"
        }
    }
}
If the table already exists, the same command fails with:
An error occurred (ResourceInUseException) when calling the CreateTable operation: Table already exists: Music
$ aws dynamodb describe-table --table-name Music | grep TableStatus
"TableStatus": "ACTIVE",
$ aws dynamodb execute-statement --statement "INSERT INTO Music  \
                VALUE  \
                {'Artist':'No One You Know','SongTitle':'Call Me Today', 'AlbumTitle':'Somewhat Famous', 'Awards':'1'}"

$ aws dynamodb execute-statement --statement "INSERT INTO Music  \
                VALUE  \
                {'Artist':'No One You Know','SongTitle':'Howdy', 'AlbumTitle':'Somewhat Famous', 'Awards':'2'}"

$ aws dynamodb execute-statement --statement "INSERT INTO Music  \
                VALUE  \
                {'Artist':'Acme Band','SongTitle':'Happy Day', 'AlbumTitle':'Songs About Life', 'Awards':'10'}"
                            
$ aws dynamodb execute-statement --statement "INSERT INTO Music  \
                VALUE  \
                {'Artist':'Acme Band','SongTitle':'PartiQL Rocks', 'AlbumTitle':'Another Album Title', 'Awards':'8'}"
{
    "Items": []
}
Re-running an INSERT for a primary key that already exists fails with:
An error occurred (DuplicateItemException) when calling the ExecuteStatement operation: Duplicate primary key exists in table
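
PartiQL statements can also read items back; a minimal sketch (filtering on the partition key keeps the statement efficient):

$ aws dynamodb execute-statement --statement "SELECT * FROM Music WHERE Artist='Acme Band'"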
$ aws dynamodb get-item --consistent-read \
    --table-name Music \
    --key '{ "Artist": {"S": "Acme Band"}, "SongTitle": {"S": "Happy Day"}}'
{
    "Item": {
        "AlbumTitle": {
            "S": "Songs About Life"
        },
        "Awards": {
            "S": "10"
        },
        "Artist": {
            "S": "Acme Band"
        },
        "SongTitle": {
            "S": "Happy Day"
        }
    }
}
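
When only some attributes are needed, a projection expression limits what get-item returns; a minimal sketch using the same key:

$ aws dynamodb get-item --consistent-read \
    --table-name Music \
    --key '{ "Artist": {"S": "Acme Band"}, "SongTitle": {"S": "Happy Day"}}' \
    --projection-expression "AlbumTitle, Awards"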
$ aws dynamodb scan --table-name Music
{
    "Items": [
        {
            "AlbumTitle": {
                "S": "Somewhat Famous"
            },
            "Awards": {
                "S": "1"
            },
            "Artist": {
                "S": "No One You Know"
            },
            "SongTitle": {
                "S": "Call Me Today"
            }
        },
        {
            "AlbumTitle": {
                "S": "Somewhat Famous"
            },
            "Awards": {
                "S": "2"
            },
            "Artist": {
                "S": "No One You Know"
            },
            "SongTitle": {
                "S": "Howdy"
            }
        },
        {
            "AlbumTitle": {
                "S": "Songs About Life"
            },
            "Awards": {
                "S": "10"
            },
            "Artist": {
                "S": "Acme Band"
            },
            "SongTitle": {
                "S": "Happy Day"
            }
        },
        {
            "AlbumTitle": {
                "S": "Another Album Title"
            },
            "Awards": {
                "S": "8"
            },
            "Artist": {
                "S": "Acme Band"
            },
            "SongTitle": {
                "S": "PartiQL Rocks"
            }
        }
    ],
    "Count": 4,
    "ScannedCount": 4,
    "ConsumedCapacity": null
}
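
A scan reads every item in the table; when only one artist's songs are needed, a key-condition query is cheaper. A minimal sketch:

$ aws dynamodb query \
    --table-name Music \
    --key-condition-expression "Artist = :a" \
    --expression-attribute-values '{":a": {"S": "Acme Band"}}'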
flowchart TD
	subgraph Amazon
		EC2
	end

	subgraph ubuntu
	subgraph awsCli[aws cli]
		awsCliEC2[aws ec2 xxx]
	end
		sshClient[ssh client]
  end
	EC2 <--> awsCliEC2
	EC2 <--> sshClient

	classDef yellow fill:#FFFFCC
	classDef pink fill:#FFCCCC
	classDef blue fill:#0000FF
	classDef lightblue fill:#ADD8E6

	class EC2 pink
	class awsCliEC2 pink
	class sshClient pink

Because EC2 is itself virtual compute and configuring it is relatively complex, using the AWS CLI for it is not recommended; use the Dashboard (web console) instead.

2.2.3. ec2metadata xxx

# run these commands from inside the EC2 instance (e.g. over ssh)
$ ec2metadata --instance-id
i-01234567890abcdef

$ ec2metadata --instance-type
t3.medium

$ ec2metadata --public-ipv4
199.199.199.199

$ ec2metadata --public-hostname
ec2-199-199-199-199.eu-west-1.compute.amazonaws.com

Alternatively, query the instance metadata service directly:
$ curl http://169.254.169.254/latest/meta-data/public-hostname
ec2-199-199-199-199.eu-west-1.compute.amazonaws.com
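
If the instance enforces IMDSv2, the request needs a session token first; a minimal sketch:

$ TOKEN=$(curl -s -X PUT "http://169.254.169.254/latest/api/token" \
    -H "X-aws-ec2-metadata-token-ttl-seconds: 21600")
$ curl -s -H "X-aws-ec2-metadata-token: $TOKEN" \
    http://169.254.169.254/latest/meta-data/public-hostname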
flowchart TD
	subgraph Amazon
		S3
	end

	subgraph ubuntu
	subgraph awsCli[aws cli]
		awsCliS3[aws s3 xxx]
	end
  end
	S3 <--> awsCliS3

	classDef yellow fill:#FFFFCC
	classDef pink fill:#FFCCCC
	classDef blue fill:#0000FF
	classDef lightblue fill:#ADD8E6

	class S3 pink
	class awsCliS3 pink

2.3.2. aws s3 xxx

Copies a single file to or from a specified bucket and key

$ aws s3 cp s3://utilx9/demo_000.c ./
$ aws s3 cp demo_000.c s3://utilx9
# to create a folder - demo
$ aws s3 cp demo_000.c s3://utilx9/demo/demo_000.c

Lists buckets, or the objects in a bucket

$ aws s3 ls
$ aws s3 ls s3://utilx9
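
For a recursive, human-readable listing with totals, standard ls options can be added:

$ aws s3 ls s3://utilx9 --recursive --human-readable --summarize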

Creates a bucket

$ aws s3 mb s3://helperx

Moves a file (object)

$ aws s3 mv s3://helperx/README.md s3://helperx/README_bak.md

Removes a bucket

$ aws s3 rb s3://helperx

Deletes a single S3 object

$ aws s3 rm s3://utilx9/demo_000.c

Syncs directories and S3 prefixes

$ aws s3 sync s3://helperx s3://helperx-bak
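
Note that rb only succeeds on an empty bucket; to delete the objects first and then the bucket in one step, --force can be used:

$ aws s3 rb s3://helperx --force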

2.3.3. aws s3api

Returns the notification configuration of a bucket.

$ aws s3api get-bucket-notification-configuration \
	--bucket lambdax9 --output yaml

$ aws s3api get-bucket-notification-configuration \
	--bucket lambdax9 --output json
  • s3_notification.yml
LambdaFunctionConfigurations:
- Events:
  - s3:ObjectCreated:*
  - s3:ObjectRemoved:*
  Id: bbecdad0-0539-4d92-a563-1ae812316dd0
  LambdaFunctionArn: arn:aws:lambda:us-west-1:123456789012:function:LambdaHello-function-bLJNjfr7pRL5

Enables notifications of specified events for a bucket.

  • s3_notification.json
$ aws s3api put-bucket-notification-configuration \
	--bucket lambdax9 \
	--notification-configuration file://s3_notification.json
{
	"LambdaFunctionConfigurations": [{
			"LambdaFunctionArn": "arn:aws:lambda:us-west-1:123456789012:function:LambdaHello-function-bLJNjfr7pRL5",
			"Events": [
				"s3:ObjectCreated:*",
				"s3:ObjectRemoved:*"
			]
		}
	]
}
  • s3_notification_null.json
$ aws s3api put-bucket-notification-configuration \
	--bucket lambdax9 \
	--notification-configuration file://s3_notification_null.json
{
}

2.4.1. S3 Glacier

Glacier does not use normal file access, so I will not spend time studying it for now.

2.5.1. aws sns xxx

Publishes a message to Amazon SNS, which then delivers it over each subscription's protocol to the subscriber(s)

flowchart LR
	awsCli[aws sns publish]
	Lanka["lankahsu@gmail.com"]
	Mary["mary@gmail.com"]

	AmazonSNS[AmazonSNS]

	awsCli --> |publish| AmazonSNS
	Lanka <-.-> | email / arn:aws:sns:us-west-1:123456789012:lankahsu520| AmazonSNS
	Mary <-.-> | email / arn:aws:sns:us-west-1:123456789012:lankahsu520| AmazonSNS
$ aws sns create-topic --name lankahsu520
{
    "TopicArn": "arn:aws:sns:eu-west-1:123456789012:lankahsu520"
}
$ aws sns delete-topic --topic-arn arn:aws:sns:eu-west-1:123456789012:lankahsu520

The protocol that you want to use. Supported protocols include:

  • http – delivery of JSON-encoded message via HTTP POST
  • https – delivery of JSON-encoded message via HTTPS POST
  • email – delivery of message via SMTP
  • email-json – delivery of JSON-encoded message via SMTP
  • sms – delivery of message via SMS
  • sqs – delivery of JSON-encoded message to an Amazon SQS queue
  • application – delivery of JSON-encoded message to an EndpointArn for a mobile app and device
  • lambda – delivery of JSON-encoded message to an AWS Lambda function
  • firehose – delivery of JSON-encoded message to an Amazon Kinesis Data Firehose delivery stream.
$ aws sns subscribe --topic-arn arn:aws:sns:us-west-1:123456789012:lankahsu520 --protocol email --notification-endpoint lankahsu@gmail.com
{
    "SubscriptionArn": "pending confirmation"
}
$ aws sns list-subscriptions
{
    "Subscriptions": [
        {
            "SubscriptionArn": "arn:aws:sns:eu-west-1:123456789012:lankahsu520:9aaa9c9a-8dba-45b6-999b-7871bad374d2",
            "Owner": "123456789012",
            "Protocol": "email",
            "Endpoint": "lankahsu@gmail.com",
            "TopicArn": "arn:aws:sns:eu-west-1:123456789012:lankahsu520"
        }
    ]
}
$ aws sns list-topics
$ aws sns publish --topic-arn arn:aws:sns:us-west-1:123456789012:lankahsu520 --message "Hello World!"
{
    "MessageId": "2e91a93d-8b5c-5f66-8da9-f5224ebfb6f2"
}
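
For email subscriptions, a subject line can be supplied as well; --subject is a standard publish option:

$ aws sns publish --topic-arn arn:aws:sns:us-west-1:123456789012:lankahsu520 --subject "Greetings" --message "Hello World!"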
$ aws sns unsubscribe --subscription-arn arn:aws:sns:eu-west-1:123456789012:lankahsu520:9aaa9c9a-8dba-45b6-999b-7871bad374d2
$ aws sns list-subscriptions
{
    "Subscriptions": [
        {
            "SubscriptionArn": "arn:aws:sns:eu-west-1:123456789012:lankahsu520:9aaa9c9a-8dba-45b6-999b-7871bad374d2",
            "Owner": "123456789012",
            "Protocol": "email",
            "Endpoint": "lankahsu@gmail.com",
            "TopicArn": "arn:aws:sns:eu-west-1:123456789012:lankahsu520"
        }
    ]
}

Appendix

I. Study

II. Debug

II.2. Official - An error occurred (NoSuchBucket) when calling the ListObjectsV2 operation: The specified bucket does not exist

Go to the S3 console, create the bucket there, and then it can be operated on with aws s3.

This may just be a bug in aws itself.
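
Before retrying, it is worth confirming whether the bucket really exists and is reachable with the current credentials; a quick check (the exit status is non-zero if the bucket is missing or access is denied):

$ aws s3api head-bucket --bucket <bucket>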

III. Glossary

IV. Tool Usage

IV.1. bash

A. .bash_aliases

#******************************************************************************
#** aws **
#******************************************************************************
export S3_BUCKET_NAME=intercom-storage

function aws-path()
{
	echo "S3_BUCKET_NAME=${S3_BUCKET_NAME}"
}

function aws-ls()
{
	aws-path
	aws s3 ls s3://${S3_BUCKET_NAME}
}

function aws-mb()
{
	HINT="Usage: ${FUNCNAME[0]} <bucket>"
	BUCKET1="$1"

	if [ ! -z "$BUCKET1" ]; then
		aws s3 mb s3://$BUCKET1
	else
		echo $HINT
	fi
}

function aws-rb()
{
	HINT="Usage: ${FUNCNAME[0]} <bucket>"
	BUCKET1="$1"

	if [ ! -z "$BUCKET1" ]; then
		aws s3 rb s3://$BUCKET1
	else
		echo $HINT
	fi
}

#alias aws-pull="aws-path; aws s3 cp s3://${S3_BUCKET_NAME}/$1 ./"
function aws-pull()
{
	aws-path

	HINT="Usage: ${FUNCNAME[0]} <file>"
	FILE1="$1"

	if [ ! -z "$FILE1" ]; then
		aws s3 cp s3://${S3_BUCKET_NAME}/$FILE1 ./
		#echo "aws s3 cp s3://${S3_BUCKET_NAME}/$FILE1 ./"
	else
		echo $HINT
	fi
}

#alias aws-push="aws s3 cp ${1} s3://${S3_BUCKET_NAME}"
function aws-push()
{
	aws-path

	HINT="Usage: ${FUNCNAME[0]} <file>"
	FILE1="$1"

	if [ ! -z "$FILE1" ]; then
		aws s3 cp $FILE1 s3://${S3_BUCKET_NAME}/
		#echo "aws s3 cp s3://${S3_BUCKET_NAME}/$FILE1"
	else
		echo $HINT
	fi
}

function aws-pull-bash_aliases()
{
	(cd /tmp; aws-pull .bash_aliases; chmod 775 .bash_aliases; cp .bash_aliases ~/)
}

function aws-push-bash_aliases()
{
	(aws-push ~/.bash_aliases)
}

#alias aws-rm="aws s3 rm s3://${S3_BUCKET_NAME}/$1"
function aws-rm()
{
	aws-path

	HINT="Usage: ${FUNCNAME[0]} <file>"
	FILE1="$1"

	if [ ! -z "$FILE1" ]; then
		aws s3 rm s3://${S3_BUCKET_NAME}/$FILE1
		#echo "aws s3 rm s3://${S3_BUCKET_NAME}/$FILE1"
	else
		echo $HINT
	fi
}
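
After sourcing ~/.bash_aliases, typical usage looks like the following (notes.txt is just an example file name):

$ source ~/.bash_aliases
$ aws-push ./notes.txt
$ aws-ls
$ aws-pull notes.txt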

Author

Created and designed by Lanka Hsu.

License

HelperX is available under the BSD-3-Clause license. See the LICENSE file for more info.