☁️ AWS - CLI
Updated at 2015-09-24 03:12
# Install Python and pip; then...
sudo pip install awscli
# Help is your best friend
aws help
# Authenticate and set your default region
# Never authenticate with the root AWS account; create a new user
# for testing and attach the AdministratorAccess policy to it,
# but be warned that even that user is then too privileged for
# production use
aws configure
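# A rough sketch of creating such a test user from the CLI ("cli-test" is
# just an example user name); the access key pair it prints is what you
# feed into `aws configure`
aws iam create-user --user-name cli-test
aws iam attach-user-policy --user-name cli-test \
--policy-arn arn:aws:iam::aws:policy/AdministratorAccess
aws iam create-access-key --user-name cli-test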
# You can use the CLI to check which user you are
aws iam get-user
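# Newer CLI versions also have a command that shows the account and ARN
# you are calling as, even when the caller isn't an IAM user
aws sts get-caller-identity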
EC2
aws ec2 describe-regions
# Get all your t2.micro instances
aws ec2 describe-instances --filters "Name=instance-type,Values=t2.micro"
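# If you only need a few fields, the global --query option (a JMESPath
# expression) trims the output; for example, just the instance IDs
aws ec2 describe-instances \
--filters "Name=instance-type,Values=t2.micro" \
--query "Reservations[].Instances[].InstanceId" \
--output text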
S3
# Creating a bucket.
aws s3 mb s3://your-bucket
# List all S3 buckets.
aws s3 ls
# List files and directories in a path
aws s3 ls s3://my-bucket
# Sending a single file to S3
aws s3 cp <from> s3://<to>
aws s3 cp MyFile.txt s3://my-bucket/MyFolder/
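# The reverse direction works the same way, e.g. downloading that file back
aws s3 cp s3://my-bucket/MyFolder/MyFile.txt .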
# Getting a folder from S3.
aws s3 cp --recursive s3://your-bucket/backup /path/to/dir
# Recursively copy all files and directories that are new or changed
# in the source directory compared to the S3 path. It might take a
# couple of minutes to start producing output.
aws s3 sync <from> s3://<to>
aws s3 sync s3://<from> <to>
aws s3 sync . s3://my-bucket/folder
# Same as above, but also delete destination files that don't exist in the source
aws s3 sync <from> s3://<to> --delete
aws s3 sync . s3://my-bucket/folder --delete
# Same as above, but make the local directory match S3.
aws s3 sync s3://my-bucket/folder . --delete
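# --delete is destructive, so it can be worth previewing what a sync
# would do first with --dryrun (nothing is transferred or removed)
aws s3 sync . s3://my-bucket/folder --delete --dryrun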
# Excluding files in sync.
aws s3 sync <from> s3://<to> --exclude '*.txt'
# Conditionally excluding files in sync.
aws s3 sync <from> s3://<to> --exclude '*.txt' --include 'important*.txt'
aws s3 sync <from> s3://<to> --exclude '*' --include 'just_these*'
# Set access permissions on uploaded files.
# Values: private, public-read or public-read-write.
aws s3 sync <from> s3://<to> --acl public-read
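# To share a private object without changing its ACL, recent CLI versions
# can generate a temporary presigned URL (--expires-in is in seconds)
aws s3 presign s3://my-bucket/MyFolder/MyFile.txt --expires-in 3600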
# Remove a single file from S3.
aws s3 rm s3://<bucket>/<key>
aws s3 rm s3://my-bucket/folder/folder/file.txt
# Recursively remove a directory from S3
aws s3 rm s3://<bucket>/<folder> --recursive
aws s3 rm s3://my-bucket/folder/folder --recursive
# The easiest way to remove a bucket is to force its removal, which
# first deletes every object in it.
aws s3 rb --force s3://bucket-name
# If you upload two files with the same key, the newer one overwrites the
# older, which is usually what you want. S3 also supports versioning,
# which keeps every previous version of an object; it's just disabled by
# default, and keep in mind that you will have to pay for the storage
# space of all the previous versions
aws s3api put-bucket-versioning \
--bucket your-bucket \
--versioning-configuration Status=Enabled
# List all object versions in a bucket.
aws s3api list-object-versions --bucket your-bucket
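# To narrow that down to a single key, and to fetch one specific old
# version back (the version ID below is just a placeholder)
aws s3api list-object-versions --bucket your-bucket --prefix MyFolder/MyFile.txt
aws s3api get-object --bucket your-bucket --key MyFolder/MyFile.txt \
--version-id EXAMPLE_VERSION_ID MyFile.txt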