Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix region signature mismatch issue #24

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 49 additions & 12 deletions tinyhost/tinyhost.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,28 @@
import secrets
import string
import tempfile
from typing import Optional

import boto3
import click
import magic
from botocore.config import Config
from botocore.exceptions import ClientError, NoCredentialsError
from bs4 import BeautifulSoup

# Resolve the active AWS region once so the client and every presigned URL it
# generates agree on the signing region (SigV4 signatures embed the region).
session = boto3.session.Session()
aws_region = session.region_name or "us-east-1"  # sensible default when no region is configured

# Create the S3 client exactly once, with explicit region and SigV4 signing.
# (A plain `boto3.client("s3")` can sign for the wrong region, producing
# SignatureDoesNotMatch errors on presigned URLs outside us-east-1.)
s3_client = boto3.client(
    "s3",
    region_name=aws_region,
    config=Config(
        signature_version="s3v4",            # SigV4 is mandatory in newer regions
        s3={"addressing_style": "virtual"},  # virtual-hosted-style bucket URLs
    ),
)


@click.command()
Expand Down Expand Up @@ -147,7 +160,12 @@ def tinyhost(html_file: str, bucket: str, prefix: str, duration: int, reset: boo
)

signed_url = s3_client.generate_presigned_url(
"get_object", Params={"Bucket": bucket, "Key": s3_key}, ExpiresIn=duration
'get_object',
Params={
'Bucket': bucket,
'Key': s3_key
},
ExpiresIn=duration
)

if signed_url:
Expand Down Expand Up @@ -188,8 +206,6 @@ def get_datastore_presigned_urls(bucket: str, prefix: str, datastore_id: str, du
MAX_DATASTORE_SIZE = 2 * 1024 * 1024 # 2 Megabytes
object_key = f"{prefix}/{datastore_id}.json"

# Check if object key exists, if not, make one, with the content {}
# and the right ContentType
try:
s3_client.head_object(Bucket=bucket, Key=object_key)
print(f"Object {object_key} exists.")
Expand All @@ -202,16 +218,23 @@ def get_datastore_presigned_urls(bucket: str, prefix: str, datastore_id: str, du
raise e

get_url = s3_client.generate_presigned_url(
"get_object", Params={"Bucket": bucket, "Key": object_key}, ExpiresIn=duration
'get_object',
Params={
'Bucket': bucket,
'Key': object_key
},
ExpiresIn=duration
)

# POST is used for the writing side, because it's the only way to ensure a maximum length
post_conditions = [
["content-length-range", 0, MAX_DATASTORE_SIZE],
]

post_dict = s3_client.generate_presigned_post(
Bucket=bucket, Key=object_key, Conditions=post_conditions, ExpiresIn=duration
Bucket=bucket,
Key=object_key,
Conditions=post_conditions,
ExpiresIn=duration
)

return get_url, post_dict
Expand All @@ -226,7 +249,7 @@ def compute_sha1_hash(file_path: str) -> str:
return sha1.hexdigest()


def run_new_bucket_flow() -> str:
def run_new_bucket_flow() -> Optional[str]:
sts_client = boto3.client("sts")
identity = sts_client.get_caller_identity()
arn = identity["Arn"]
Expand All @@ -243,11 +266,25 @@ def run_new_bucket_flow() -> str:
error_code = e.response["Error"]["Code"]
if error_code == "404":
click.echo(f"Bucket {bucket} does not exist, attempting to create")

s3_client.create_bucket(Bucket=bucket)
return bucket

try:
# For regions other than us-east-1, we need to specify LocationConstraint
if aws_region == "us-east-1":
s3_client.create_bucket(Bucket=bucket)
else:
s3_client.create_bucket(
Bucket=bucket,
CreateBucketConfiguration={
'LocationConstraint': aws_region
}
)
return bucket
except ClientError as ce:
click.echo(f"Failed to create bucket: {ce}")
return None
else:
raise RuntimeError(f"Error checking bucket existence: {e}")
click.echo(f"Error checking bucket existence: {e}")
return None


if __name__ == "__main__":
Expand Down