import boto3

# Resolve whatever credentials the default boto3 session finds
# (environment variables, shared config/credentials file, or instance role).
aws_creds = boto3.Session().get_credentials()
AWS_KEY = aws_creds.access_key
AWS_SECRET = aws_creds.secret_key
AWS_TOKEN = aws_creds.token

# fsspec/s3fs-style options dict built from the same credentials.
storage_options = {
    "key": AWS_KEY,
    "secret": AWS_SECRET,
    "token": AWS_TOKEN,
}
# Credentials payload for a JWT/domain-authenticated service.
# NOTE(review): `domain` and `jwt_token` are assumed to be defined earlier
# in the session — confirm before running this snippet in isolation.
config = {"credentials": {"domain": domain, "jwt": jwt_token}}
import boto3
import io

# --- S3: listing, uploading, and deleting objects ---
s3 = boto3.client('s3')

# All buckets visible to the current credentials.
s3.list_buckets()

# Every object version under a prefix (useful on versioned buckets).
s3.list_object_versions(
    Bucket="BUCKET",
    Prefix="KEY",
)

# Serialize a DataFrame to Parquet in memory, then upload the raw bytes.
# NOTE(review): assumes `df` (a pandas DataFrame) is defined earlier.
bytesio = io.BytesIO()
df.to_parquet(bytesio)
s3.put_object(Body=bytesio.getvalue(), Bucket="BUCKET", Key="discovery/file.parquet")

# Stream a local file to S3 without reading it fully into memory.
# (Fixed: the original `with` body was not indented — a SyntaxError.)
with open("test.csv", 'rb') as f:
    s3.upload_fileobj(f, "BUCKET", "test.csv")

# Delete a single object (on versioned buckets this adds a delete marker).
s3.delete_object(Bucket="BUCKET", Key="discovery/file.parquet")
# --- S3: restore an archived (e.g. Glacier-class) object ---
# NOTE(review): relies on the `s3` client created in the previous snippet.
bucket_name = 'your-bucket-name'
key = 'path/to/your/file'
# Initiate a restore request for the file
response = s3.restore_object(
    Bucket=bucket_name,
    Key=key,
    RestoreRequest={
        'Days': 7,  # The number of days to keep the object in the restored state
        'GlacierJobParameters': {
            'Tier': 'Bulk'  # The restore speed option (Expedited, Standard, or Bulk)
        }
    }
)
import requests
import boto3

# --- EC2: look up the tags attached to the current instance ---
ec2 = boto3.client("ec2")

# Tags across all resources the credentials can describe.
ec2.describe_tags()

# Ask the instance metadata service (IMDS) for this instance's identity.
# The timeout is essential: off-EC2, 169.254.169.254 is unroutable and the
# request would otherwise block indefinitely.
# NOTE(review): this is the IMDSv1 flow; instances enforcing IMDSv2 require
# fetching a session token first — confirm your instances' IMDS settings.
metadata = requests.get(
    "http://169.254.169.254/latest/dynamic/instance-identity/document",
    timeout=2,
).json()
instance_id = metadata["instanceId"]

# Tags scoped to just this instance.
ec2.describe_tags(Filters=[{"Name": "resource-id", "Values": [instance_id]}])["Tags"]
import boto3

# --- Auto Scaling: inspect and resize a group ---
autoscaling = boto3.client("autoscaling")
# Current configuration and instance list of the named group.
autoscaling.describe_auto_scaling_groups(AutoScalingGroupNames=["NAME"])
# WARNING: scales the group to zero — this terminates all of its instances.
autoscaling.update_auto_scaling_group(AutoScalingGroupName="NAME", MinSize=0, MaxSize=0, DesiredCapacity=0)
import boto3

# --- IAM: enumerate roles and a role's attached managed policies ---
iam = boto3.client("iam")
# NOTE(review): list_roles() is paginated — this captures at most one page.
roles = iam.list_roles()
iam.list_attached_role_policies(RoleName="NAME")
import boto3

# --- STS: assume a role and capture its temporary credentials ---
# (The three constants below are reused by later snippets.)
sts = boto3.client("sts")
sts_session = sts.assume_role(RoleArn="arn:aws:iam::ACCOUNT:role/ARN", RoleSessionName="session")
KEY_ID = sts_session['Credentials']['AccessKeyId']
# NOTE(review): despite the name, ACCESS_KEY holds the *secret* access key.
ACCESS_KEY = sts_session['Credentials']['SecretAccessKey']
TOKEN = sts_session['Credentials']['SessionToken']
import boto3

# --- AWS Batch: jobs and compute environments ---
batch = boto3.client("batch")

# Jobs visible with default parameters.
batch.list_jobs()

# Jobs in a specific queue, filtered by job name.
# (Fixed: the original had the closing quote misplaced after `myJobQueue`
# and was missing the closing parenthesis — a SyntaxError.)
batch.list_jobs(jobQueue="myJobQueue", filters=[{"name": "JOB_NAME", "values": ["my_job"]}])

# Details for a named compute environment.
batch.describe_compute_environments(computeEnvironments=["DTZBatchComputeEnvironment-prd-v3"])
import boto3
import pandas as pd

# Widen pandas display limits so the model listings aren't truncated.
pd.set_option("display.max_rows", 150)
pd.set_option("display.max_columns", 150)
pd.set_option("display.max_colwidth", None)

# --- Bedrock: browse available foundation models and inference profiles ---
bedrock = boto3.client("bedrock", region_name="us-east-1")
pd.json_normalize(bedrock.list_foundation_models()["modelSummaries"])
pd.json_normalize(bedrock.list_inference_profiles()["inferenceProfileSummaries"])
import json
import boto3

# --- Bedrock runtime: invoke an Anthropic Claude model (raw Messages API) ---
prompt = "Why is the sky blue?"
prefill = ""  # optional assistant prefill to steer the start of the reply
system = ""   # optional system prompt
# model_id = "anthropic.claude-3-5-sonnet-20241022-v2:0"
# model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0"
# model_id = "anthropic.claude-3-haiku-20240307-v1:0"
model_id = "anthropic.claude-3-5-haiku-20241022-v1:0"

bedrock_runtime = boto3.client(service_name="bedrock-runtime")
body = json.dumps(
    {
        # Per AWS docs, "bedrock-2023-05-31" is the required value for
        # Anthropic models on Bedrock; the original empty string is invalid.
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 2000,
        "messages": [
            {"role": "user", "content": prompt},
            # NOTE(review): if `prefill` stays "", some models reject an
            # empty assistant turn — set it or drop this message.
            {"role": "assistant", "content": prefill},
        ],
        "temperature": 0.0,
        "top_p": 1,
        "system": system,
    }
)
response = bedrock_runtime.invoke_model(body=body, modelId=model_id)
# The response body is a streaming blob: read, JSON-decode, pull the text.
json.loads(response.get("body").read()).get("content")[0].get("text")
import json
import boto3

# --- Bedrock runtime: multi-turn conversation with the Converse API ---
model_id = "anthropic.claude-3-5-sonnet-20240620-v1:0"
system = [{"text": "You are an app that creates playlists for a radio station that plays rock and pop music. Only return song names and the artist."}]

# Running message history: each turn appends the user prompt and the
# model's reply so the next call carries full context.
messages = []
message_1 = {"role": "user", "content": [{"text": "Create a list of 3 pop songs."}]}
messages.append(message_1)

bedrock_runtime = boto3.client(service_name="bedrock-runtime")

# First turn.
response = bedrock_runtime.converse(
    modelId=model_id,
    messages=messages,
    system=system,
)
output_message = response['output']['message']
messages.append(output_message)

# Second turn: refine the previous answer using the accumulated history.
message_2 = {"role": "user", "content": [{"text": "Make sure the songs are by artists from the United Kingdom."}]}
messages.append(message_2)
response = bedrock_runtime.converse(
    modelId=model_id,
    messages=messages,
    system=system,
)
output_message = response['output']['message']
messages.append(output_message)
import boto3
from decimal import Decimal

# --- DynamoDB: clients, table inspection, and scans ---
# Three alternative client constructions (the last assignment wins):
# default credentials, a local endpoint, and explicit STS credentials
# (KEY_ID/ACCESS_KEY/TOKEN come from the assume_role snippet above).
dynamodb = boto3.client("dynamodb")
dynamodb = boto3.client('dynamodb', endpoint_url='http://localhost:5000')
dynamodb = boto3.client("dynamodb", aws_access_key_id=KEY_ID, aws_secret_access_key=ACCESS_KEY, aws_session_token=TOKEN, region_name="eu-west-1")

dynamodb.list_global_tables()
dynamodb.describe_table(TableName="NAME")

# Print a table's attribute schema.
# NOTE(review): assumes `table_name` is defined earlier in the session.
# (Fixed: the original `for` body was not indented — a SyntaxError.)
response = dynamodb.describe_table(TableName=table_name)
attribute_definitions = response['Table']['AttributeDefinitions']
for attribute in attribute_definitions:
    print(attribute['AttributeName'], attribute['AttributeType'])

# Resource-level API for item scans.
dynamodb = boto3.resource("dynamodb", aws_access_key_id=KEY_ID, aws_secret_access_key=ACCESS_KEY, aws_session_token=TOKEN, region_name="eu-west-1")
table = dynamodb.Table("TABLE")

# Scan filtered on a string attribute.
table.scan(Limit=10, FilterExpression='rd = :rd_value', ExpressionAttributeValues={':rd_value': "2023-02-02T06:29:41Z"})

# Scan filtered on a numeric attribute (DynamoDB numbers map to Decimal).
# (Fixed: the original had a stray quote before the closing brace — a
# SyntaxError — and bound ':rd_value' instead of the ':ms_value'
# placeholder used in the expression, which would raise a
# ValidationException at runtime.)
table.scan(Limit=10, FilterExpression='ms = :ms_value', ExpressionAttributeValues={':ms_value': Decimal(636020822)})
import boto3

# --- Secrets Manager: fetch a secret's current value ---
secretsmanager = boto3.client('secretsmanager', region_name="us-east-1")
secretsmanager.get_secret_value(SecretId="ID")