Skip to content

Commit

Permalink
refactor summary table
Browse files Browse the repository at this point in the history
  • Loading branch information
JessyBarrette committed Feb 13, 2024
1 parent ba7f2a2 commit b0e73ad
Showing 1 changed file with 10 additions and 9 deletions.
19 changes: 10 additions & 9 deletions hakai_ckan_records_checks/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,14 @@ def link_issue_page(record_row):
def link_record_page_title(record_row):
    # Render a record's catalogue link as an HTML anchor for the summary table.
    # NOTE(review): this block comes from a diff view with +/- markers stripped,
    # so both the removed and the added return line are present below.
    # Blank cell when the record row has no issues (NaN in the "issues" column).
    if pd.isna(record_row["issues"]):
        return ""
    # Old version (removed in commit b0e73ad): link text is the record's title.
    return f"<a href='https://catalogue.hakai.org/dataset/{record_row['name']}' target='_blank'>{record_row['title']}</a>"
    # New version (added in commit b0e73ad): fixed link text "Hakai CKAN Record".
    # Unreachable here as written — diff artifact; in the real file only one of
    # these two return statements exists. TODO confirm against the repository.
    return f"<a href='https://catalogue.hakai.org/dataset/{record_row['name']}' target='_blank'>Hakai CKAN Record</a>"

summary = summary.dropna(subset=["id", "name", "organization", "title"], how="any")
summary["issues"] = summary.apply(link_issue_page, axis=1)
summary["title"] = summary.apply(link_record_page_title, axis=1)
summary = summary.astype({"resources_count": "int32"})
summary = summary.fillna("")
return summary
summary = summary.assign(
issues=summary.apply(link_issue_page, axis=1),
links=summary.apply(link_record_page_title, axis=1),
)
return summary.astype({"resources_count": "int32"}).fillna("")


def review_records(ckan: str, max_workers) -> dict:
Expand Down Expand Up @@ -115,9 +115,10 @@ def main(ckan_url, api_key, output, max_workers, log_level, cache):
results = pickle.load(file)
else:
results = review_records(ckan, max_workers)
with open(CACHE_FILE, "wb") as file:
logger.info("Caching results")
pickle.dump(results, file)
if cache:
with open(CACHE_FILE, "wb") as file:
logger.info("Caching results")
pickle.dump(results, file)

if not output:
return
Expand Down

0 comments on commit b0e73ad

Please sign in to comment.