aws configure list
aws s3 ls s3://mybucket --request-payer requester
aws s3 ls s3://hrrrzarr/ --no-sign-request
AWS_PROFILE=dev aws s3 ls s3://mybucket
Copy data from bucket to local
AWS_PROFILE=dev aws s3 cp s3://BUCKET/test_local.csv test_local.csv
Copy folder from bucket to local
aws s3 cp s3://BUCKET/df.parquet df.parquet --recursive
Sync from bucket to local
aws s3 sync s3://BUCKET/df.parquet df.parquet
Copy data from local to bucket
AWS_PROFILE=dev aws s3 cp test_local.csv s3://BUCKET/
Copy folder from local to bucket
aws s3 cp df.parquet s3://BUCKET/df.parquet/ --recursive
Copy some files from local to bucket
aws s3 sync . s3://BUCKET/ --exclude="*" --include="*.grib"
Copy data between buckets
aws s3 cp s3://source-bucket/ s3://destination-bucket/ --recursive
Delete a folder
aws s3 rm s3://BUCKET/ --recursive
Delete a bucket and its contents (--force empties it first)
aws s3 rb s3://BUCKET --force
See folder size
aws s3 ls s3://mybucket --recursive --human-readable --summarize
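The same copy/list operations can be scripted with boto3; a minimal sketch (bucket names, keys, and local paths below are placeholders):
import boto3

s3 = boto3.client("s3")  # honours AWS_PROFILE / the default credential chain

# download and upload a single object
s3.download_file("BUCKET", "test_local.csv", "test_local.csv")
s3.upload_file("test_local.csv", "BUCKET", "test_local.csv")

# total size under a prefix (rough equivalent of ls --recursive --summarize)
total = 0
for page in s3.get_paginator("list_objects_v2").paginate(Bucket="BUCKET", Prefix="df.parquet/"):
    for obj in page.get("Contents", []):
        total += obj["Size"]
print(f"total bytes: {total}")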
Open data on AWS
https://registry.opendata.aws/
https://s3.console.aws.amazon.com/s3/buckets/noaa-himawari8/?region=us-east-1
aws s3 ls s3://era5-pds/ --no-sign-request
aws s3 cp s3://era5-pds/zarr/2020/12/data/eastward_wind_at_10_metres.zarr/ eastward_wind_at_10_metres.zarr --no-sign-request --recursive
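Public buckets can also be read anonymously from boto3, the analogue of --no-sign-request; a sketch using the era5-pds example above:
import boto3
from botocore import UNSIGNED
from botocore.config import Config

s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))  # unsigned requests
resp = s3.list_objects_v2(Bucket="era5-pds", Prefix="zarr/2020/12/data/", Delimiter="/")
for prefix in resp.get("CommonPrefixes", []):
    print(prefix["Prefix"])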
Find delete markers in a versioned bucket (here, those created on 2022-07-07)
aws s3api list-object-versions --bucket BUCKET --prefix KEY --query "DeleteMarkers[?IsLatest && starts_with(LastModified,'2022-07-07')].{Key:Key,VersionId:VersionId}"
aws s3api list-object-versions --bucket BUCKET --prefix KEY --output json --query 'DeleteMarkers[?IsLatest==`true`].[Key, VersionId]' | jq -r '.[] | "--key '\''" + .[0] + "'\'' --version-id " + .[1]' | xargs -L1 aws s3api delete-object --bucket BUCKET
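A boto3 sketch of the same clean-up: list the latest delete markers under a prefix and remove them to "undelete" the objects (BUCKET/KEY are placeholders):
import boto3

s3 = boto3.client("s3")
for page in s3.get_paginator("list_object_versions").paginate(Bucket="BUCKET", Prefix="KEY"):
    for marker in page.get("DeleteMarkers", []):
        if marker["IsLatest"]:
            # deleting the delete marker restores the previous version
            s3.delete_object(Bucket="BUCKET", Key=marker["Key"], VersionId=marker["VersionId"])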
Restore object from glacier (thaw)
aws s3api restore-object --bucket BUCKET --key KEY --request-payer requester --restore-request Days=10
aws s3api restore-object --bucket BUCKET --key KEY --restore-request '{"Days": 10, "GlacierJobParameters": {"Tier": "Standard"}}'
Check if an object is in glacier
AWS_PROFILE=dev aws s3api head-object --bucket BUCKET --key KEY
See an object's ACL
aws s3api get-object-acl --bucket BUCKET --key KEY
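A boto3 sketch of the restore/thaw flow, assuming the same BUCKET/KEY placeholders:
import boto3

s3 = boto3.client("s3")
s3.restore_object(
    Bucket="BUCKET",
    Key="KEY",
    RestoreRequest={"Days": 10, "GlacierJobParameters": {"Tier": "Standard"}},
)
head = s3.head_object(Bucket="BUCKET", Key="KEY")
print(head.get("StorageClass"))  # e.g. GLACIER / DEEP_ARCHIVE; absent for STANDARD
print(head.get("Restore"))       # ongoing-request="true" while the thaw is in progress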
In AWS console -> EC2 -> Launch Instance ->
Default Amazon Machine Image (AMI): Amazon Linux 2 AMI (HVM)
Ubuntu Server 22.04 LTS
-> choose AMI
Deep Learning AMI GPU PyTorch 2.0.0 (Ubuntu 20.04) 20230530 ami-0e79b2db3e5f19265 (GPU)
Deep Learning AMI GPU PyTorch 2.1.0 (Ubuntu 20.04) 20240208 ami-0da80daf69cab6d24 (GPU)
-> choose instance
t2.micro (1 vCPU, 1 GB RAM)
t2.large (2 vCPU, 8 GB RAM)
r5.4xlarge (16 vCPU, 128 GB RAM)
r5.8xlarge (32 vCPU, 256 GB RAM)
g3.4xlarge (16 vCPU, 122 GB RAM, M60 GPU, 8 GB GPU RAM, 2048 CUDA cores, ~$1.2/hr)
g3.16xlarge (64 vCPU, 488 GB RAM, 4 * M60 GPU, 32 GB GPU RAM, 8192 CUDA cores, ~$4.6/hr)
g5.xlarge (4 vCPU, 16 GB RAM, A10 GPU: 24 GB GPU RAM, 9216 CUDA cores, ~$1/hr)
g5.2xlarge (8 vCPU, 32 GB RAM, A10 GPU: 24 GB GPU RAM, 9216 CUDA cores, ~$1.2/hr)
g5.4xlarge (16 vCPU, 64 GB RAM, A10 GPU: 24 GB GPU RAM, 9216 CUDA cores, ~$1.6/hr)
g5.8xlarge (32 vCPU, 128 GB RAM, A10 GPU: 24 GB GPU RAM, 9216 CUDA cores, ~$2.5/hr)
g5.16xlarge (64 vCPU, 256 GB RAM, A10 GPU: 24 GB GPU RAM, 9216 CUDA cores, ~$4.1/hr)
g5.12xlarge (48 vCPU, 192 GB RAM, 4 * A10 GPU: 96 GB GPU RAM, 36864 CUDA cores, ~$5.7/hr)
g5g.xlarge (arm, ...)
p2.xlarge (4 vCPU, 61 GB RAM, K80 GPU: 12 GB GPU RAM, 4992 CUDA cores, ~$1/hr)
p2.8xlarge (32 vCPU, 488 GB RAM, 8 * K80 GPU: 96 GB GPU RAM, 39936 CUDA cores, ~$7.2/hr)
p3.2xlarge (8 vCPU, 61 GB RAM, V100 GPU: 16 GB GPU RAM, 5120 CUDA cores, ~$2.4/hr)
p3.8xlarge (32 vCPU, 244 GB RAM, 4 * V100 GPU: 64 GB GPU RAM, 20480 CUDA cores, ~$12.2/hr)
Create an IAM role (if the instance needs AWS permissions).
In dropdown of IAM select the role you just created (if needed).
Create a key pair and save it locally. This is a .pem file. Put it in the .aws folder. Make sure the key file permissions are restrictive:
chmod 400 name.pem
SSH into your machine using (Amazon Linux, Ubuntu):
ssh -v -i ~/.aws/key.pem ec2-user@Private-IPv4-address
ssh -v -i ~/.aws/key.pem ubuntu@Private-IPv4-address
ssh -v -i ~/.aws/ray.pem ubuntu@IP
ssh -v -i ~/.aws/ray.pem -L 7860:localhost:7860 ubuntu@IP
ssh -v -i ~/.aws/ray.pem -L 8000:localhost:8000 -L 8001:localhost:8001 -L 8002:localhost:8002 -L 8003:localhost:8003 -L 8004:localhost:8004 -L 8005:localhost:8005 -L 8786:localhost:8786 -L 8787:localhost:8787 -L 8888:localhost:8888 -L 8050:localhost:8050 ubuntu@IP
conda init bash
source ~/.bashrc
aws ec2 describe-instances
aws ec2 describe-instances --filter Name=tag:Name,Values=VALUE
AWS_PROFILE=PROFILE aws ec2 describe-instances
AWS_PROFILE=dev aws ec2 describe-instance-attribute --instance-id i-0f76b62b5b1b8bd94 --attribute groupSet
AWS_PROFILE=dev aws ec2 describe-instances --filter Name=tag:Name,Values=VALUE | jq ".Reservations[].Instances[] | {LaunchTime, PrivateIpAddress}"
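The same tag-filtered lookup in boto3 (the tag value is a placeholder):
import boto3

ec2 = boto3.client("ec2")
resp = ec2.describe_instances(Filters=[{"Name": "tag:Name", "Values": ["VALUE"]}])
for reservation in resp["Reservations"]:
    for instance in reservation["Instances"]:
        print(instance["LaunchTime"], instance.get("PrivateIpAddress"))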
EC2 launch (cloud-init) logs:
sudo tail -f /var/log/cloud-init-output.log
Get the instance id and region from within the instance (instance metadata service):
import requests
metadata = requests.get("http://169.254.169.254/latest/dynamic/instance-identity/document").json()
instance_id = metadata["instanceId"]
region = metadata["region"]
See my AMI while on the EC2 machine
curl http://169.254.169.254/latest/meta-data/ami-id
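Instances that enforce IMDSv2 need a session token first; a sketch of the token-based lookup:
import requests

token = requests.put(
    "http://169.254.169.254/latest/api/token",
    headers={"X-aws-ec2-metadata-token-ttl-seconds": "21600"},
).text
ami_id = requests.get(
    "http://169.254.169.254/latest/meta-data/ami-id",
    headers={"X-aws-ec2-metadata-token": token},
).text
print(ami_id)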
Set up Python
wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh
bash Miniforge3-Linux-x86_64.sh
exit and log back in
conda install mamba -c conda-forge -y
mamba create -n test_env python=3.10 -y
aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin ACCOUNT.dkr.ecr.us-east-1.amazonaws.com
List images in a repository
aws ecr list-images --repository-name REPO_NAME
aws ecr describe-images --repository-name REPO_NAME --image-ids imageTag=TAG
date -r $(aws ecr describe-images --repository-name REPO_NAME --image-ids imageTag=TAG | jq '.imageDetails[0].imagePushedAt')
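The pushed time comes back as a ready-made datetime in boto3, which avoids the jq/date juggling (REPO_NAME/TAG are placeholders):
import boto3

ecr = boto3.client("ecr")
resp = ecr.describe_images(repositoryName="REPO_NAME", imageIds=[{"imageTag": "TAG"}])
print(resp["imageDetails"][0]["imagePushedAt"])  # datetime of the push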
Pull and run an image
docker pull ACCOUNT.dkr.ecr.us-east-1.amazonaws.com/REPO:ID
docker run -it ACCOUNT.dkr.ecr.us-east-1.amazonaws.com/REPO:ID /bin/bash
docker image ls | grep ID
docker run -it IMAGE_ID /bin/bash
docker pull ACCOUNT.dkr.ecr.us-east-1.amazonaws.com/metaflow-gpu:latest
docker run -it ID /bin/bash
AWS_PROFILE=PROFILE aws iam create-role --role-name ROLE-NAME --assume-role-policy-document file://Policy.json
aws iam list-attached-role-policies --role-name ROLE-NAME
aws iam detach-role-policy --role-name ROLE-NAME --policy-arn arn:aws:iam::aws:policy/AmazonSSMPatchAssociation
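A boto3 sketch of the role lifecycle above; the trust policy here is a generic EC2 example, and the role/policy names are placeholders:
import json
import boto3

iam = boto3.client("iam")
trust_policy = {
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Principal": {"Service": "ec2.amazonaws.com"},
        "Action": "sts:AssumeRole",
    }],
}
iam.create_role(RoleName="ROLE-NAME", AssumeRolePolicyDocument=json.dumps(trust_policy))
print(iam.list_attached_role_policies(RoleName="ROLE-NAME")["AttachedPolicies"])
iam.detach_role_policy(
    RoleName="ROLE-NAME",
    PolicyArn="arn:aws:iam::aws:policy/AmazonSSMPatchAssociation",
)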
aws batch describe-job-queues
aws batch list-jobs --job-queue JOBQUEUE --job-status FAILED
aws batch list-jobs --job-queue JOBQUEUE --job-status PENDING
aws batch list-jobs --job-queue JOBQUEUE --job-status RUNNING
aws batch list-jobs --job-queue JOBQUEUE --filters "name=JOB_NAME,values=VALUES"
aws batch terminate-job --job-id JOBID --reason "Terminating job."
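A boto3 sketch of the same queue inspection and termination (queue name and job id are placeholders):
import boto3

batch = boto3.client("batch")
for status in ("PENDING", "RUNNING", "FAILED"):
    jobs = batch.list_jobs(jobQueue="JOBQUEUE", jobStatus=status)["jobSummaryList"]
    print(status, [job["jobName"] for job in jobs])
batch.terminate_job(jobId="JOBID", reason="Terminating job.")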
aws dynamodb list-tables --region us-east-1
aws dynamodb list-tables --region eu-west-1
AWS_PROFILE=dev aws dynamodb list-tables
aws dynamodb scan --table-name <TABLE_NAME> --select "COUNT"
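A boto3 sketch of the COUNT scan, paginating in case the table is large (table name is a placeholder):
import boto3

dynamodb = boto3.client("dynamodb", region_name="us-east-1")
print(dynamodb.list_tables()["TableNames"])

count, kwargs = 0, {"TableName": "TABLE_NAME", "Select": "COUNT"}
while True:
    resp = dynamodb.scan(**kwargs)
    count += resp["Count"]
    if "LastEvaluatedKey" not in resp:
        break
    kwargs["ExclusiveStartKey"] = resp["LastEvaluatedKey"]
print(count)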
aws events list-rules
aws events describe-rule --name "RULE"
aws events describe-event-bus --name "BUS_NAME"
AWS_PROFILE=dev aws cloudformation create-stack \
--stack-name ec2-example --template-body file://cfn.yaml --capabilities CAPABILITY_NAMED_IAM CAPABILITY_IAM \
--parameters ParameterKey=env,ParameterValue=dev --tags Key=tag1key,Value=tag1value Key=tag2key,Value=tag2value
AWS_PROFILE=dev aws cloudformation delete-stack --stack-name ec2-example
AWS_PROFILE=dev aws cloudformation describe-stacks
aws cloudformation describe-stacks --stack-name STACKNAME
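A boto3 sketch of the same stack create/describe/delete cycle, assuming the cfn.yaml template from above:
import boto3

cfn = boto3.Session(profile_name="dev").client("cloudformation")
with open("cfn.yaml") as f:
    template_body = f.read()

cfn.create_stack(
    StackName="ec2-example",
    TemplateBody=template_body,
    Capabilities=["CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"],
    Parameters=[{"ParameterKey": "env", "ParameterValue": "dev"}],
    Tags=[{"Key": "tag1key", "Value": "tag1value"}, {"Key": "tag2key", "Value": "tag2value"}],
)
print(cfn.describe_stacks(StackName="ec2-example")["Stacks"][0]["StackStatus"])
# cfn.delete_stack(StackName="ec2-example")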
aws sts get-caller-identity
aws secretsmanager get-secret-value --secret-id MyTestSecret
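The boto3 equivalents for identity and secret lookup (secret id is the same placeholder as above):
import boto3

print(boto3.client("sts").get_caller_identity()["Account"])
secret = boto3.client("secretsmanager").get_secret_value(SecretId="MyTestSecret")
print(secret["SecretString"])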
Populates ~/.kube/config
AWS_PROFILE=prd aws eks update-kubeconfig --name dtn-main-c9 --alias "ALIAS"
aws bedrock list-foundation-models --region us-east-1 --by-provider anthropic --query "modelSummaries[*].modelId"
aws bedrock list-inference-profiles
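A boto3 sketch of the Bedrock listings; the response field names here are my assumption of the current API shape:
import boto3

bedrock = boto3.client("bedrock", region_name="us-east-1")
for model in bedrock.list_foundation_models(byProvider="anthropic")["modelSummaries"]:
    print(model["modelId"])
for profile in bedrock.list_inference_profiles()["inferenceProfileSummaries"]:
    print(profile["inferenceProfileId"])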