diff --git a/.github/workflows/envoy.yml b/.github/workflows/envoy.yml
index e1db550..a770847 100644
--- a/.github/workflows/envoy.yml
+++ b/.github/workflows/envoy.yml
@@ -20,6 +20,17 @@ jobs:
     if: github.repository_owner == 'envoyproxy'
     steps:
     - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332  # v4.1.7
+
+    - id: versions
+      run: |
+        bazel build --config=ci //:debs_downloads
+
+    # NOTE(review): the `versions` step above never writes `hash` to
+    # $GITHUB_OUTPUT, so this cache key resolves empty — confirm intent.
+    # TODO(review): pin to a commit SHA like the checkout step above.
+    - uses: actions/cache/restore@v4
+      with:
+        key: ${{ steps.versions.outputs.hash }}
     - run: |
         bazel build --config=ci //:debs
         tar tf bazel-bin/debs.tar.gz
diff --git a/fetcher.py b/fetcher.py
index 9333b32..8bbbf22 100644
--- a/fetcher.py
+++ b/fetcher.py
@@ -13,6 +13,8 @@
 from aio.run import runner
 from envoy.base.utils import extract, pack
 
+DEFAULT_MAX_CONCURRENCY = 3
+
 
 class FetchingRunner(runner.Runner):
 
@@ -31,6 +33,7 @@ def session(self):
     def add_arguments(self, parser) -> None:
         super().add_arguments(parser)
         parser.add_argument('--downloads', help="JSON k/v of downloads/checksums(optional)")
+        parser.add_argument('--concurrency', type=int, default=DEFAULT_MAX_CONCURRENCY, help="Maximum concurrent downloads")
         parser.add_argument('--extract-downloads', action="store_true", default=False, help="Extract downloaded files")
         parser.add_argument('--output', help="Output format")
         parser.add_argument('--output-path', help="Output path")
@@ -42,6 +45,13 @@ def download_path(self, url):
             self.downloads[url]["path"],
             self.filename(url))
 
+    async def fetch(self, url):
+        download_path = None
+        if "path" in self.downloads[url]:
+            download_path = self.download_path(url)
+            download_path.parent.mkdir(parents=True, exist_ok=True)
+        return await self.fetch_bytes(url, path=download_path)
+
     async def fetch_bytes(self, url, path=None):
         async with self.session.get(url) as response:
             if not path:
@@ -60,13 +70,6 @@ async def fetch_bytes(self, url, path=None):
 
                 return url, None
 
-    async def fetch_artefacts(self, url):
-        download_path = None
-        if "path" in self.downloads[url]:
-            download_path = self.download_path(url)
-            download_path.parent.mkdir(parents=True, exist_ok=True)
-        return await self.fetch_bytes(url, path=download_path)
-
     def filename(self, url):
         parsed_url = urlsplit(url)
         path_parts = parsed_url.path.split("/")
@@ -80,7 +83,7 @@ def hashed(self, content):
 
     @runner.cleansup
     async def run(self):
         result = {}
-        async for (url, response) in concurrent((self.fetch_artefacts(url) for url in self.downloads), limit=3):
+        async for (url, response) in concurrent((self.fetch(url) for url in self.downloads), limit=self.args.concurrency):
             if self.args.output == "json":
                 result[url] = response.decode()