Merge pull request #756 from cristim/opensearch-crash-fix
Fix crash in OpenSearch scraper
brookemckim authored Jan 4, 2025
2 parents 44e9bf6 + 0944c71 commit fa70664
Showing 1 changed file with 11 additions and 13 deletions.
24 changes: 11 additions & 13 deletions opensearch.py
@@ -114,16 +114,18 @@ def scrape(output_file, input_file=None):
 
     # loop through products, and only fetch available instances for now
     for sku, product in tqdm(six.iteritems(data["products"])):
-        if (
-            product.get("productFamily", None) == "Amazon OpenSearch Service Instance"
+        if (product.get("productFamily", None) == "Amazon OpenSearch Service Instance"
             and product.get("attributes", {}).get("operation", None)
-            != "DirectQueryAmazonS3GDCOCU"
-        ):
-            attributes = product["attributes"]
+            != "DirectQueryAmazonS3GDCOCU"):
+
+            attributes = product.get("attributes", {})
+            if "instanceType" not in attributes:
+                continue
+
+            instance_type = attributes["instanceType"]
 
             # map the region
             location = ec2.canonicalize_location(attributes["location"])
-            instance_type = attributes["instanceType"]
             if location == "Any":
                 region = "us-east-1"
             elif location == "Asia Pacific (Osaka-Local)":
@@ -149,9 +151,7 @@ def scrape(output_file, input_file=None):
 
             if instance_type not in instances.keys():
                 # delete some attributes that are inconsistent among skus
-                new_attributes = (
-                    attributes.copy()
-                )  # make copy so we can keep these attributes with the sku
+                new_attributes = attributes.copy()  # make copy so we can keep these attributes with the sku
                 new_attributes.pop("location", None)
                 new_attributes.pop("locationType", None)
                 new_attributes.pop("operation", None)
@@ -291,9 +291,7 @@ def scrape(output_file, input_file=None):
                         "yrTerm3.noUpfront-hrs"
                     ]
 
-                    instances[instance_type]["pricing"][region][
-                        "reserved"
-                    ] = reserved_prices
+                    instances[instance_type]["pricing"][region]["reserved"] = reserved_prices
                 except Exception as e:
                     print(
                         "ERROR: Trouble generating Cache reserved price for {}: {!r}".format(
@@ -316,4 +314,4 @@ def scrape(output_file, input_file=None):
         input_file = sys.argv[1]
 
     output_file = "./www/opensearch/instances.json"
-    scrape(output_file, input_file)
+    scrape(output_file, input_file)
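
For context, a minimal runnable sketch (not taken from the repository) of the failure mode this commit guards against: a SKU that matches the product-family filter but ships without an "attributes" dict, or without an "instanceType" attribute. With the old product["attributes"] lookup either case raises KeyError and crashes the scraper; the new product.get("attributes", {}) plus the instanceType check simply skips the incomplete SKU. The sample products data below is hypothetical, and the DirectQueryAmazonS3GDCOCU operation filter is omitted for brevity.

# Hypothetical sample data, shaped like the AWS pricing API's "products" map.
products = {
    "SKU-OK": {
        "productFamily": "Amazon OpenSearch Service Instance",
        "attributes": {
            "instanceType": "m5.large.search",
            "location": "US East (N. Virginia)",
        },
    },
    # Matches the product-family filter but carries no attributes at all;
    # the old product["attributes"] lookup raised KeyError on entries like this.
    "SKU-BROKEN": {
        "productFamily": "Amazon OpenSearch Service Instance",
    },
}

for sku, product in products.items():
    if product.get("productFamily", None) == "Amazon OpenSearch Service Instance":
        attributes = product.get("attributes", {})  # tolerate a missing attributes dict
        if "instanceType" not in attributes:
            continue  # skip incomplete SKUs instead of crashing
        print(sku, attributes["instanceType"])  # only SKU-OK is printed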
