Commit

Images: script ubuntu image fetch
picnoir committed Mar 28, 2024
1 parent 8ed95ba · commit b5ce328
Showing 3 changed files with 62 additions and 5 deletions.

.github/workflows/update-images.yml (7 changes: 5 additions & 2 deletions)

@@ -16,8 +16,10 @@ jobs:
       - name: Run update-images.py
         run: |
           scripts/update-images.py
-          cat debian.json | jq -S > debian/images.json
-          rm debian.json
+          for distro in ubuntu debian; do
+            cat "$distro".json | jq -S > "$distro"/images.json
+            rm "$distro".json
+          done
           if [[ -z $(git status -s) ]]; then
             echo "Images up to date"
             exit 0
@@ -29,6 +31,7 @@
           branchname="update-images-$(date +%F)"
           git checkout -b "${branchname}"
           git add debian/images.json
+          git add ubuntu/images.json
           git commit -m "Update images: $(date +%F)"
           git push origin "${branchname}"
           gh pr create -B main -H "${branchname}" \
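
The loop above generalizes the Debian-only post-processing: each distro's scratch file is run through jq -S and moved to its images.json. Sorted keys keep regenerated files diffing cleanly against the committed ones. For intuition, a rough Python equivalent of that step (a sketch only; the workflow itself uses jq, and this assumes the scratch files already exist):

# Sketch of what the jq -S loop achieves: pretty-print with sorted keys
# so regenerated JSON produces stable diffs.
import json

for distro in ("ubuntu", "debian"):
    with open(f"{distro}.json") as src:  # scratch file written by update-images.py
        data = json.load(src)
    with open(f"{distro}/images.json", "w") as dst:
        json.dump(data, dst, sort_keys=True, indent=2)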

scripts/update-images.py (56 changes: 55 additions & 1 deletion)

@@ -29,7 +29,7 @@ def get_latest_debian_image(url):
     url = f"{url}/{latest[1]}"
     print(f"[+] Parsing latest entry: {url}")
 
-    # Step 2: parse entry
+    # Step 2: retrieve images
     page = requests.get(url)
     soup = BeautifulSoup(page.content, "html.parser")
     rows = soup.find_all("tr")
@@ -66,7 +66,61 @@ def debian_parse():
     }
     return json.dumps(res)
 
+def get_latest_ubuntu_image(url):
+    print(f"[+] Parsing ubuntu index {url}")
+    # Step 1: retrieve the latest entry
+    page = requests.get(url)
+    soup = BeautifulSoup(page.content, "html.parser")
+    links = soup.find_all("a")
+    l = [link["href"] for link in links if re.compile("^release-.*[0-9]{8}/").match(link["href"])]
+    parsed_l = [(datetime.strptime(s[8:-1], "%Y%m%d"), s) for s in l]
+    latest = max(parsed_l)
+
+    # Step 2: retrieve images
+    url = f"{url}{latest[1]}"
+    print(f"[+] Parsing latest entry: {url}")
+    page = requests.get(url)
+    soup = BeautifulSoup(page.content, "html.parser")
+    links = soup.find_all("a")
+    res = {}
+    for link in links:
+        if re.compile(".*-server-cloudimg.*\.img$").match(link["href"]):
+            link = link["href"]
+            if "amd64" in link:
+                res["x86_64-linux"] = f"{url}{link}"
+            elif "arm64" in link:
+                res["aarch64-linux"] = f"{url}{link}"
+    return res
+
+def ubuntu_parse():
+    mantic_url = "https://cloud-images.ubuntu.com/releases/23.10/"
+    lunar_url = "https://cloud-images.ubuntu.com/releases/23.04/"
+    kinetic_url = "https://cloud-images.ubuntu.com/releases/22.10/"
+    jammy_url = "https://cloud-images.ubuntu.com/releases/22.04/"
+    focal_url = "https://cloud-images.ubuntu.com/releases/focal/"
+    mantic = get_latest_ubuntu_image(mantic_url)
+    lunar = get_latest_ubuntu_image(lunar_url)
+    kinetic = get_latest_ubuntu_image(kinetic_url)
+    jammy = get_latest_ubuntu_image(jammy_url)
+    focal = get_latest_ubuntu_image(focal_url)
+
+    res = {}
+    def gen_entry_dict(entry):
+        return { "name": entry, "hash": nix_hash(entry) }
+    for arch in mantic.keys():
+        res[arch] = {
+            "20_04": gen_entry_dict(focal[arch]),
+            "22_04": gen_entry_dict(jammy[arch]),
+            "22_10": gen_entry_dict(kinetic[arch]),
+            "23_04": gen_entry_dict(lunar[arch]),
+            "23_10": gen_entry_dict(mantic[arch]),
+        }
+    return json.dumps(res)
+
 if __name__ == '__main__':
+    ubuntu_json = ubuntu_parse()
+    with open("ubuntu.json", "w") as f:
+        f.write(ubuntu_json)
     debian_json = debian_parse()
     with open("debian.json", "w") as f:
         f.write(debian_json)
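
The new get_latest_ubuntu_image mirrors the Debian logic: it matches release-YYYYMMDD/ links on the release index, parses the date out of each href, and keeps the newest. A standalone sketch of that selection step, with hypothetical hrefs standing in for the live index:

# The hrefs below are made-up examples of what an Ubuntu cloud-images
# index lists; the selection logic follows the script's own code.
import re
from datetime import datetime

hrefs = ["release-20240301/", "release-20240315/", "release/", "unpacked/"]
pattern = re.compile("^release-.*[0-9]{8}/")
# s[8:-1] drops the "release-" prefix and the trailing slash, leaving YYYYMMDD.
parsed = [(datetime.strptime(s[8:-1], "%Y%m%d"), s) for s in hrefs if pattern.match(s)]
print(max(parsed)[1])  # tuples compare on the datetime first: "release-20240315/"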

ubuntu/default.nix (4 changes: 2 additions & 2 deletions)

@@ -2,8 +2,8 @@
 let
   imagesJSON = lib.importJSON ./images.json;
   fetchImage = image: pkgs.fetchurl {
-    inherit (image) hash;
-    url = "https://cloud-images.ubuntu.com/releases/${image.releaseName}/release-${image.releaseTimeStamp}/${image.name}";
+    sha256 = image.hash;
+    url = image.name;
   };
   images = lib.mapAttrs (k: v: fetchImage v) imagesJSON.${system};
   makeVmTestForImage = image: { testScript, sharedDirs, diskSize ? null }: generic.makeVmTest {
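
Since update-images.py now stores the absolute image URL under "name" (and its hash under "hash"), the Nix side no longer reconstructs URLs from releaseName/releaseTimeStamp; fetchurl takes image.name directly. A hypothetical excerpt of the images.json shape this implies; the URL and hash values below are illustrative placeholders, not taken from a real run:

# Illustrative only: shape of ubuntu/images.json after this commit.
import json

example = {
    "x86_64-linux": {
        "23_10": {
            "name": "https://cloud-images.ubuntu.com/releases/23.10/"
                    "release-20240301/ubuntu-23.10-server-cloudimg-amd64.img",
            "hash": "<sha256 placeholder>",  # real value comes from nix_hash()
        },
    },
}
print(json.dumps(example, sort_keys=True, indent=2))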
