diff --git a/.github/sc4e-check-updates.py b/.github/sc4e-check-updates.py
index b98e63b3..730d3644 100644
--- a/.github/sc4e-check-updates.py
+++ b/.github/sc4e-check-updates.py
@@ -4,6 +4,8 @@
 #
 # Pass directories or yaml files as arguments.
 
+# TODO incorporate this script into sc4pac-actions
+
 import yaml
 import sys
 import os
diff --git a/.github/sc4pac-yaml-schema.py b/.github/sc4pac-yaml-schema.py
deleted file mode 100644
index b73d3ee1..00000000
--- a/.github/sc4pac-yaml-schema.py
+++ /dev/null
@@ -1,527 +0,0 @@
-#!/usr/bin/env python3
-#
-# Pass directories or yaml files as arguments to validate sc4pac yaml files.
-
-import yaml
-import sys
-import os
-import re
-from urllib.parse import (urlparse, parse_qs)
-import jsonschema
-from jsonschema import ValidationError
-
-# add subfolders as necessary
-subfolders = r"""
-### [subfolders-docsify]
-050-load-first
-100-props-textures
-150-mods
-170-terrain
-180-flora
-200-residential
-300-commercial
-360-landmark
-400-industrial
-410-agriculture
-500-utilities
-600-civics
-610-safety
-620-education
-630-health
-640-government
-650-religion
-660-parks
-700-transit
-710-automata
-900-overrides
-### [subfolders-docsify]
-""".strip().splitlines()[1:-1]
-
-# Add packages as necessary if the check for matching package and asset
-# versions would otherwise fail and if there is a reason why the versions
-# differ.
-ignore_version_mismatches = set([
-    "vortext:vortexture-1",
-    "vortext:vortexture-2",
-    "t-wrecks:industrial-revolution-mod-addon-set-i-d",
-    "memo:industrial-revolution-mod",
-    "bsc:mega-props-jrj-vol01",
-    "bsc:mega-props-diggis-canals-streams-and-ponds",
-    "bsc:mega-props-rubik3-vol01-wtc-props",
-    "bsc:mega-props-newmaninc-rivers-and-ponds",
-])
-
-# Add packages as necessary. These packages should only be used as dependencies
-# from packages with a matching variant. For example, a package without a DN
-# variant should never depend on simfox:day-and-nite-mod.
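-# Each entry maps a dependency to the (variant key, expected value) pair that
-# any package declaring that dependency is expected to carry, e.g. only
-# packages with variant "nightmode=dark" may depend on simfox:day-and-nite-mod.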
-variant_specific_dependencies = {
-    "simfox:day-and-nite-mod": ("nightmode", "dark"),
-    "toroca:industry-quadrupler": ("toroca:industry-quadrupler:capacity", "quadrupled"),
-    "cam:colossus-addon-mod": ("CAM", "yes"),
-}
-
-unique_strings = {
-    "type": "array",
-    "items": {"type": "string"},
-    "uniqueItems": True,
-}
-
-map_of_strings = {
-    "type": "object",
-    "patternProperties": {".*": {"type": "string"}},
-}
-
-asset_schema = {
-    "title": "Asset",
-    "type": "object",
-    "additionalProperties": False,
-    "required": ["assetId", "version", "lastModified", "url"],
-    "properties": {
-        "assetId": {"type": "string"},
-        "version": {"type": "string"},
-        "lastModified": {"type": "string"},
-        "url": {"type": "string", "validate_query_params": True},
-        "nonPersistentUrl": {"type": "string", "validate_query_params": True},
-        "archiveType": {
-            "type": "object",
-            "additionalProperties": False,
-            "properties": {
-                "format": {"enum": ["Clickteam"]},
-                "version": {"enum": ["20", "24", "30", "35", "40"]},
-            },
-        },
-        "checksum": {
-            "type": "object",
-            "additionalProperties": False,
-            "required": ["sha256"],
-            "properties": {
-                "sha256": {"type": "string", "validate_sha256": True},
-            },
-        },
-    },
-}
-
-assets = {
-    "type": "array",
-    "items": {
-        "type": "object",
-        "additionalProperties": False,
-        "required": ["assetId"],
-        "properties": {
-            "assetId": {"type": "string"},
-            "include": {**unique_strings, "validate_pattern": True},
-            "exclude": {**unique_strings, "validate_pattern": True},
-            "withChecksum": {
-                "type": "array",
-                "items": {
-                    "type": "object",
-                    "additionalProperties": False,
-                    "required": ["include", "sha256"],
-                    "properties": {
-                        "include": {"type": "string", "validate_pattern": True},
-                        "sha256": {"type": "string", "validate_sha256": True},
-                    },
-                },
-                "uniqueItems": True,
-            },
-        },
-    },
-}
-
-package_schema = {
-    "title": "Package",
-    "type": "object",
-    "additionalProperties": False,
-    "required": ["group", "name", "version", "subfolder"],
-    "properties": {
-        "group": {"type": "string"},
-        "name": {"type": "string", "validate_name": True},
-        "version": {"type": "string"},
-        "subfolder": {"enum": subfolders},
-        "dependencies": unique_strings,
-        "assets": assets,
-        "variants": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "additionalProperties": False,
-                "required": ["variant"],
-                "properties": {
-                    "variant": map_of_strings,
-                    "dependencies": unique_strings,
-                    "assets": assets,
-                },
-            },
-        },
-        "variantDescriptions": {
-            "type": "object",
-            "patternProperties": {".*": map_of_strings},
-        },
-        "info": {
-            "type": "object",
-            "additionalProperties": False,
-            "properties": {
-                "summary": {"type": "string"},
-                "warning": {"type": "string", "validate_text_field": "warning"},
-                "conflicts": {"type": "string", "validate_text_field": "conflicts"},
-                "description": {"type": "string", "validate_text_field": "description"},
-                "author": {"type": "string"},
-                "images": unique_strings,
-                "website": {"type": "string", "validate_query_params": True},
-            },
-        },
-    },
-}
-
-schema = {
-    "oneOf": [asset_schema, package_schema]
-}
-
-# if there are dependencies to packages in other channels, add those channels here
-extra_channels = [
-    # "https://memo33.github.io/sc4pac/channel/sc4pac-channel-contents.json",
-]
-
-
-class DependencyChecker:
-
-    naming_convention = re.compile(r"[a-z0-9]+(?:-[a-z0-9]+)*")
-    naming_convention_variants_value = re.compile(r"[a-z0-9]+([-\.][a-z0-9]+)*", re.IGNORECASE)
-    naming_convention_variants = re.compile(  # group:package:variant (regex groups: \1:\2:\3)
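-        # e.g. "CAM", "nightmode", or fully qualified "toroca:industry-quadrupler:capacity"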
rf"(?:({naming_convention.pattern}):)?(?:({naming_convention.pattern}):)?([a-zA-Z0-9]+(?:[-\.][a-zA-Z0-9]+)*)") - version_rel_pattern = re.compile(r"(.*?)(-\d+)?") - pronouns_pattern = re.compile(r"\b[Mm][ey]\b|(?:\bI\b(?!-|\.| [A-Z]))") - desc_invalid_chars_pattern = re.compile(r'\\n|\\"') - sha256_pattern = re.compile(r"[a-f0-9]*", re.IGNORECASE) - - def __init__(self): - self.known_packages = set() - self.known_assets = set() - self.referenced_packages = set() - self.referenced_assets = set() - self.self_dependencies = set() - self.duplicate_packages = set() - self.duplicate_assets = set() - self.asset_urls = {} # asset -> url - self.asset_versions = {} # asset -> version - self.overlapping_variants = set() - self.known_variant_values = {} - self.unexpected_variants = [] - self.invalid_asset_names = set() - self.invalid_group_names = set() - self.invalid_package_names = set() - self.invalid_variant_names = set() - self.packages_with_single_assets = {} # pkg -> (version, set of assets from variants) - self.packages_using_asset = {} # asset -> set of packages - self.dlls_without_checksum = set() - self.http_without_checksum = set() - self.unexpected_variant_specific_dependencies = [] # (pkg, dependency) - - def aggregate_identifiers(self, doc): - if 'assetId' in doc: - asset = doc['assetId'] - if asset not in self.known_assets: - self.known_assets.add(asset) - else: - self.duplicate_assets.add(asset) - url = doc.get('url') - self.asset_urls[asset] = url - self.asset_versions[asset] = doc.get('version') - if not self.naming_convention.fullmatch(asset): - self.invalid_asset_names.add(asset) - if urlparse(url).scheme not in ['https', 'file'] and 'checksum' not in doc: - self.http_without_checksum.add(asset) - if 'group' in doc and 'name' in doc: - pkg = doc['group'] + ":" + doc['name'] - if pkg not in self.known_packages: - self.known_packages.add(pkg) - else: - self.duplicate_packages.add(pkg) - if not self.naming_convention.fullmatch(doc['group']): - self.invalid_group_names.add(doc['group']) - if not self.naming_convention.fullmatch(doc['name']): - self.invalid_package_names.add(doc['name']) - - def asset_ids(obj): - return (a['assetId'] for a in obj.get('assets', []) if 'assetId' in a) - - variants0 = doc.get('variants', []) - def iterate_doc_and_variants(): - yield doc - yield from variants0 - - for obj in iterate_doc_and_variants(): - local_deps = obj.get('dependencies', []) - self.referenced_packages.update(local_deps) - for dep in local_deps: - if dep == pkg: - self.self_dependencies.add(pkg) - if dep in variant_specific_dependencies: - expected_variant, expected_value = variant_specific_dependencies[dep] - if obj.get('variant', {}).get(expected_variant) != expected_value: - self.unexpected_variant_specific_dependencies.append((pkg, dep)) - - local_assets = list(asset_ids(obj)) - self.referenced_assets.update(local_assets) - for a in local_assets: - if a in self.packages_using_asset: - self.packages_using_asset[a].add(pkg) - else: - self.packages_using_asset[a] = set([pkg]) - - num_doc_assets = len(doc.get('assets', [])) - if num_doc_assets <= 1: - single_assets = set(asset_ids(doc)) - if all(len(v.get('assets', [])) <= 1 for v in variants0): - for v in variants0: - single_assets.update(asset_ids(v)) - self.packages_with_single_assets[pkg] = (doc.get('version'), single_assets) - - variants = [v.get('variant', {}) for v in variants0] - if len(variants) != len(set(tuple(sorted(v.items())) for v in variants)): - # the same variant should not be defined twice - 
-                self.overlapping_variants.add(pkg)
-
-            variant_keys = set(key for v in variants for key, value in v.items())
-            for key in variant_keys:
-                variant_values = set(v[key] for v in variants if key in v)
-                if key not in self.known_variant_values:
-                    self.known_variant_values[key] = variant_values
-                elif self.known_variant_values[key] != variant_values:
-                    self.unexpected_variants.append((pkg, key, sorted(variant_values), sorted(self.known_variant_values[key])))
-                else:
-                    pass
-                if not self.naming_convention_variants.fullmatch(key):
-                    self.invalid_variant_names.add(key)
-                for value in variant_values:
-                    if not self.naming_convention_variants_value.fullmatch(value):
-                        self.invalid_variant_names.add(value)
-
-            is_dll = ("DLL" in doc.get('info', {}).get('summary', "")) or ("dll" in doc['name'].split('-'))
-            if is_dll:
-                has_asset = False
-                has_checksum = False
-                for obj in iterate_doc_and_variants():
-                    for asset in obj.get('assets', []):
-                        has_asset = True
-                        if "withChecksum" in asset:
-                            has_checksum = True
-                if has_asset and not has_checksum:
-                    self.dlls_without_checksum.add(pkg)
-
-    def _get_channel_contents(self, channel_url):
-        import urllib.request
-        import json
-        req = urllib.request.Request(channel_url)
-        with urllib.request.urlopen(req) as data:
-            channel_contents = json.load(data)
-        return channel_contents['contents']
-
-    def unknowns(self):
-        packages = self.referenced_packages.difference(self.known_packages)
-        assets = self.referenced_assets.difference(self.known_assets)
-        if packages or assets:
-            # some dependencies are not known, so check other channels
-            contents = [self._get_channel_contents(channel_url) for channel_url in extra_channels]
-            remote_assets = [pkg['name'] for c in contents for pkg in c if pkg['group'] == "sc4pacAsset"]
-            remote_packages = [f"{pkg['group']}:{pkg['name']}" for c in contents for pkg in c if pkg['group'] != "sc4pacAsset"]
-            packages = packages.difference(remote_packages)
-            assets = assets.difference(remote_assets)
-        return {'packages': sorted(packages), 'assets': sorted(assets)}
-
-    def duplicates(self):
-        return {'packages': sorted(self.duplicate_packages),
-                'assets': sorted(self.duplicate_assets)}
-
-    def assets_with_same_url(self):
-        url_assets = {u: a for a, u in self.asset_urls.items()}
-        non_unique_assets = [(a1, a2) for a1, u in self.asset_urls.items()
-                             if (a2 := url_assets[u]) != a1]
-        return non_unique_assets
-
-    def unused_assets(self):
-        return sorted(self.known_assets.difference(self.referenced_assets))
-
-    # turns a patch version such as 1.0.0-2 into 1.0.0
-    def _version_without_rel(self, version):
-        return self.version_rel_pattern.fullmatch(version).group(1)
-
-    def _should_expect_matching_version_for_asset(self, asset):
-        # for assets used by more packages, we assume that the asset contains
-        # multiple unrelated packages, so versions of packages do not need to match
-        return len(self.packages_using_asset.get(asset, [])) <= 3
-
-    def package_asset_version_mismatches(self):
-        for pkg, (version, assets) in self.packages_with_single_assets.items():
-            if pkg in ignore_version_mismatches:
-                continue
-            v1 = self._version_without_rel(version)
-            for asset in assets:
-                if self._should_expect_matching_version_for_asset(asset):
-                    v2 = self._version_without_rel(self.asset_versions.get(asset, 'None'))
-                    if v1 != v2:
-                        yield (pkg, v1, asset, v2)
-
-
-def validate_document_separators(text) -> None:
-    needs_separator = False
-    errors = 0
-    for line in text.splitlines():
-        if line.startswith("---"):
-            needs_separator = False
-        elif (line.startswith("group:") or line.startswith("\"group\":") or
-                line.startswith("url:") or line.startswith("\"url\":")):
-            if needs_separator:
-                errors += 1
-            else:
-                needs_separator = True
-        elif line.startswith("..."):
-            break
-    if errors > 0:
-        raise yaml.parser.ParserError(
-            "YAML file contains multiple package and asset definitions. They all need to be separated by `---`.")
-
-
-def validate_pattern(validator, value, instance, schema):
-    patterns = [instance] if isinstance(instance, str) else instance
-    bad_patterns = [p for p in patterns if p.startswith('.*')]
-    if bad_patterns:
-        yield ValidationError(f"include/exclude patterns should not start with '.*' in {bad_patterns}")
-
-
-_irrelevant_query_parameters = [
-    ("sc4evermore.com", ("catid",)),
-    ("simtropolis.com", ("confirm", "t", "csrfKey")),
-]
-
-
-def validate_query_params(validator, value, url, schema):
-    msgs = []
-    if '/sc4evermore.com/' in url:
-        msgs.append(f"Domain of URL {url} should be www.sc4evermore.com (add www.)")
-    qs = parse_qs(urlparse(url).query)
-    bad_params = [p for domain, params in _irrelevant_query_parameters
-                  if domain in url for p in params if p in qs]
-    if bad_params:
-        msgs.append(f"Avoid these URL query parameters: {', '.join(bad_params)}")
-    if msgs:
-        yield ValidationError('\n'.join(msgs))
-
-
-def validate_name(validator, value, name, schema):
-    if "-vol-" in name:
-        yield ValidationError(f"Avoid the hyphen after 'vol' (for consistency with other packages): {name}")
-
-
-def validate_text_field(validator, field, text, schema):
-    msgs = []
-    if text is not None and text.strip().lower() == "none":
-        msgs.append(f"""Text "{field}" should not be "{text.strip()}", but should be omitted instead.""")
-    if text is not None and DependencyChecker.pronouns_pattern.search(text):
-        msgs.append(f"""The "{field}" should be written in a neutral perspective (avoid the words 'I', 'me', 'my').""")
-    if text is not None and DependencyChecker.desc_invalid_chars_pattern.search(text):
-        msgs.append(f"""The "{field}" seems to be malformed (avoid the characters '\\n', '\\"').""")
-    if msgs:
-        yield ValidationError('\n'.join(msgs))
-
-
-def validate_sha256(validator, value, text, schema):
-    if not (len(text) == 64 and DependencyChecker.sha256_pattern.fullmatch(text)):
-        yield ValidationError(f"value is not a sha256: {text}")
-
-
-def main() -> int:
-    args = sys.argv[1:]
-    if not args:
-        print("Pass at least one directory or yaml file to validate as argument.")
-        return 1
-
-    validator = jsonschema.validators.extend(
-        jsonschema.validators.Draft202012Validator,
-        validators=dict(
-            validate_pattern=validate_pattern,
-            validate_query_params=validate_query_params,
-            validate_name=validate_name,
-            validate_text_field=validate_text_field,
-            validate_sha256=validate_sha256,
-        ),
-    )(schema)
-    validator.check_schema(schema)
-    dependency_checker = DependencyChecker()
-    validated = 0
-    errors = 0
-
-    def basic_report(identifiers, msg: str, stringify=None):
-        if identifiers:
-            nonlocal errors
-            errors += len(identifiers)
-            print(f"===> {msg}")
-            for identifier in identifiers:
-                print(identifier if stringify is None else stringify(identifier))
-
-    for d in args:
-        for (root, dirs, files) in os.walk(d):
-            for fname in files:
-                if not fname.endswith(".yaml"):
-                    continue
-                p = os.path.join(root, fname)
-                with open(p, encoding='utf-8') as f:
-                    validated += 1
-                    text = f.read()
-                    try:
-                        validate_document_separators(text)
-                        for doc in yaml.safe_load_all(text):
-                            if doc is None:  # empty yaml file or document
-                                continue
-                            dependency_checker.aggregate_identifiers(doc)
-                            err = jsonschema.exceptions.best_match(validator.iter_errors(doc))
-                            msgs = [] if err is None else [err.message]
-
-                            if msgs:
-                                errors += 1
-                                print(f"===> {p}")
-                                for msg in msgs:
-                                    print(msg)
-                    except yaml.parser.ParserError as err:
-                        errors += 1
-                        print(f"===> {p}")
-                        print(err)
-
-    if not errors:
-        # check that all dependencies exist
-        # (this check only makes sense for the self-contained main channel)
-        for label, unknown in dependency_checker.unknowns().items():
-            basic_report(unknown, f"The following {label} are referenced, but not defined:")
-        for label, dupes in dependency_checker.duplicates().items():
-            basic_report(dupes, f"The following {label} are defined multiple times:")
-        basic_report(dependency_checker.self_dependencies, "The following packages unnecessarily depend on themselves:")
-        basic_report(dependency_checker.unexpected_variant_specific_dependencies, "The following packages have dependencies that should only be used with specific variants:",
-                     lambda tup: "{0} depends on {1}, but this dependency should only be used with variant \"{2}={3}\"".format(*(tup + variant_specific_dependencies[tup[1]])))
-        basic_report(dependency_checker.assets_with_same_url(),
-                     "The following assets have the same URL (The same asset was defined twice with different asset IDs):",
-                     lambda assets: ', '.join(assets))
-        basic_report(dependency_checker.unused_assets(), "The following assets are not used:")
-        basic_report(dependency_checker.overlapping_variants, "The following packages have duplicate variants:")
-        basic_report(dependency_checker.unexpected_variants, "",
-                     lambda tup: "{0} defines unexpected {1} variants {2} (expected: {3})".format(*tup))  # pkg, key, values, expected_values
-        basic_report(dependency_checker.invalid_asset_names, "the following assetIds do not match the naming convention (lowercase alphanumeric hyphenated)")
-        basic_report(dependency_checker.invalid_group_names, "the following group identifiers do not match the naming convention (lowercase alphanumeric hyphenated)")
-        basic_report(dependency_checker.invalid_package_names, "the following package names do not match the naming convention (lowercase alphanumeric hyphenated)")
-        basic_report(dependency_checker.invalid_variant_names, "the following variant labels or values do not match the naming convention (alphanumeric hyphenated or dots)")
-        basic_report(list(dependency_checker.package_asset_version_mismatches()),
-                     "The versions of the following packages do not match the version of the referenced assets (usually they should agree, but if the version mismatch is intentional, the packages can be added to the ignore list in .github/sc4pac-yaml-schema.py):",
-                     lambda tup: """{0} "{1}" (expected version "{3}" of asset {2})""".format(*tup))  # pkg, v1, asset, v2
-        basic_report(dependency_checker.dlls_without_checksum, "The following packages appear to contain DLLs. A sha256 checksum is required for DLLs (add a `withChecksum` field).")
-        basic_report(dependency_checker.http_without_checksum, "The following assets use http instead of https. They should include a `checksum` field.")
-
-    if errors > 0:
-        print(f"Finished with {errors} errors.")
-        return 1
-    else:
-        print(f"Successfully validated {validated} files.")
-        return 0
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/.github/st-check-updates.py b/.github/st-check-updates.py
deleted file mode 100644
index c9367818..00000000
--- a/.github/st-check-updates.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python3
-#
-# Checks whether any assets on STEX are newer than stated in our yaml files,
-# considering the last 180 days.
-# The STEX_API_KEY environment variable must be set for authentication.
-#
-# Pass `--mode=id` as argument to query exactly the IDs used in asset URLs.
-# Defaults to `--mode=updated` which queries for recently updated IDs only.
-#
-# Additionally, pass directories or yaml files as arguments.
-
-import yaml
-import sys
-import os
-import re
-from dateutil.parser import isoparse
-from datetime import timezone, timedelta
-import urllib.request
-import json
-
-stex_api_key = os.environ.get('STEX_API_KEY')  # issued by ST admins
-url_id_pattern = re.compile(r".*simtropolis.com/files/file/(\d+)-.*?(?:$|[?&]r=(\d+).*$)")  # matches ID and optional subfile ID
-since_days = 180  # to keep the request small
-id_limit = 250  # to keep the request small
-
-
-def nonempty_docs(dirs_or_files):
-    # Generate all the paths with non-empty documents contained in the yaml files.
-    # Yield (path, None) in case of parse error.
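-    # Directories are walked recursively; only files ending in ".yaml" are parsed.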
-    for d in dirs_or_files:
-        paths = [d] if not os.path.isdir(d) else \
-            (os.path.join(root, fname) for (root, dirs, files) in os.walk(d) for fname in files)
-        for path in paths:
-            if not path.endswith(".yaml"):
-                continue
-            with open(path, encoding='utf-8') as f:
-                text = f.read()
-                try:
-                    for doc in yaml.safe_load_all(text):
-                        if doc is None:  # empty yaml file or document
-                            continue
-                        yield path, doc
-                except yaml.parser.ParserError:
-                    yield path, None
-
-
-def main() -> int:
-    args = sys.argv[1:]
-    id_mode = any(a == "--mode=id" for a in args)  # instead of --mode=updated
-    args = [a for a in args if not a.startswith("--")]
-    if not args:
-        print("Found no yaml files to analyze.")
-        return 0
-
-    if not stex_api_key:
-        print("The STEX_API_KEY environment variable must be set for authentication.")
-        return 1
-
-    errors = 0
-    if id_mode:
-        file_ids = []
-        for p, doc in nonempty_docs(args):
-            if doc is None:  # parse error
-                errors += 1
-                continue
-
-            # find all STEX file IDs
-            url = doc.get('nonPersistentUrl') or doc.get('url')
-            if url is None:
-                continue  # not an asset
-            m = url_id_pattern.fullmatch(url)
-            if not m:
-                continue  # we only check ST files
-            file_id = m.group(1)
-            file_ids.append(file_id)
-
-        if not file_ids:
-            print("No STEX file IDs found in yaml files.")
-            return 0
-
-        # check relevant STEX file IDs only
-        req_url = f"https://community.simtropolis.com/stex/files-api.php?key={stex_api_key}&sort=desc&id=" + ",".join(file_ids[:id_limit])
-    else:
-        # check most recently updated STEX entries only
-        req_url = f"https://community.simtropolis.com/stex/files-api.php?key={stex_api_key}&days={since_days}&mode=updated&sc4only=true&sort=desc"
-
-    req = urllib.request.Request(req_url, headers={'User-Agent': 'Mozilla/5.0 Firefox/130.0'})
-    with urllib.request.urlopen(req) as data:
-        report = json.load(data)
-    upstream_state = {str(item['id']): item for item in report}
-
-    out_of_date = 0
-    up_to_date = 0
-    skipped = 0
-    for p, doc in nonempty_docs(args):
-        if doc is None:  # parse error
-            errors += 1
-            continue
-
-        # check URLs
-        url = doc.get('nonPersistentUrl') or doc.get('url')
-        if url is None:
-            continue  # not an asset
-        m = url_id_pattern.fullmatch(url)
-        if not m:
-            continue  # we only check ST files
-        file_id = m.group(1)
-        if file_id not in upstream_state:
-            skipped += 1  # not updated since_days
-            continue
-
-        subfile_id = m.group(2)  # possibly None
-        subfiles = upstream_state[file_id].get('files', [])
-        if subfile_id is None:
-            if len(subfiles) != 1:
-                errors += 1
-                print(f"{doc.get('assetId')}:")
-                print(f"  url must include subfile ID `r=#` as there are {len(subfiles)} subfiles:")
-                print("  " + "\n  ".join(f"{r.get('id')}: {r.get('name')}" for r in subfiles))
-                print(f"  {upstream_state[file_id].get('fileURL')}")
-        else:
-            if subfile_id not in [str(r.get('id')) for r in subfiles]:
-                errors += 1
-                print(f"{doc.get('assetId')}:")
-                print(f"  url subfile ID {subfile_id} does not exist (anymore), so must be updated:")
-                print("  " + "\n  ".join(f"{r.get('id')}: {r.get('name')}" for r in subfiles))
-                print(f"  {upstream_state[file_id].get('fileURL')}")
-
-        last_modified_upstream = isoparse(upstream_state[file_id]['updated'])
-        if last_modified_upstream.tzinfo is None:
-            last_modified_upstream = last_modified_upstream.replace(tzinfo=timezone.utc)
-
-        if 'lastModified' not in doc:
-            errors += 1  # TODO
-        else:
-            last_modified = isoparse(doc.get('lastModified'))
-            # we ignore small timestamp differences
-            if abs(last_modified_upstream - last_modified) <= timedelta(minutes=10):
-                up_to_date += 1
-            else:
-                if last_modified < last_modified_upstream:
-                    out_of_date += 1
-                else:
-                    errors += 1  # our assets should not be newer than upstream's assets TODO
-                    print("error: ", end='')
-                print(f"{doc.get('assetId')}:")
-                print(f"  {doc.get('version')} -> {upstream_state[file_id].get('release')}")
-                print(f"  {last_modified.isoformat().replace('+00:00', 'Z')} -> {last_modified_upstream.isoformat().replace('+00:00', 'Z')}")
-                print(f"  {upstream_state[file_id].get('fileURL')}")
-                print(f"  {p}")
-
-    skipped_msg = (
-        "" if not skipped else
-        f" (skipped {skipped} assets not updated in the last {since_days} days)" if not id_mode else
-        f" (skipped {skipped} assets)")
-    result = 0
-    if out_of_date == 0:
-        print(f"All {up_to_date} ST assets are up-to-date{skipped_msg}.")
-    else:
-        print(f"There are {out_of_date} outdated ST assets, while {up_to_date} are up-to-date{skipped_msg}.")
-        result |= 0x02
-    if errors > 0:
-        print(f"Finished with {errors} errors.")
-        result |= 0x01
-    return result
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/.github/url-check.sh b/.github/url-check.sh
deleted file mode 100755
index 8ceefb9f..00000000
--- a/.github/url-check.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-# Check all STEX URLs contained in files that have been modified since a commit.
-set -e
-if [ "$#" -ne 2 ]; then
-    echo "Pass the commit/branch to compare to as first argument, the src folder as second."
-    exit 1
-fi
-
-BASE="$(git merge-base @ "$1")"
-
-git diff "$BASE" --name-only -- "$2" | xargs --delimiter '\n' python .github/st-check-updates.py --mode=id
diff --git a/.github/workflows/sc4pac.yaml b/.github/workflows/sc4pac.yaml
index 74a6733b..c0ec6538 100644
--- a/.github/workflows/sc4pac.yaml
+++ b/.github/workflows/sc4pac.yaml
@@ -1,134 +1,25 @@
-name: Sc4pac CI
+name: sc4pac CI
 
 on:
   push:
     branches: [ "main", "action" ]
   pull_request_target:
     branches: [ "main" ]
-  workflow_dispatch:  # for manually triggering the workflow from Actions tab
 
-permissions:
-  contents: read
+# permissions:
+#   contents: read
 
 jobs:
-  lint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-      - name: Install dependencies
-        run: python -m pip install --upgrade PyYAML jsonschema
-      - name: Checkout pull request
-        if: ${{ github.event_name == 'pull_request_target' }}
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          # ref: "${{ github.event.pull_request.merge_commit_sha }}"
-          # As merge_commit_sha is not up-to-date due to mergeability check, we use actual PR head for now; see https://github.com/actions/checkout/issues/518#issuecomment-1757453837
-          # ref: ${{ github.event.pull_request.head.sha }}
-          # (This merge might correspond to a newer commit than the one that triggered this workflow, in case the PR was updated in the meantime -> ok)
-          ref: ${{ format('refs/pull/{0}/merge', github.event.pull_request.number) }}
-          path: tmp
-          sparse-checkout: |
-            src/yaml
-      - name: Check sc4pac yaml schema (pull_request_target)
-        if: ${{ github.event_name == 'pull_request_target' }}
-        # With pull_request_target, the `main` branch is checked out, not the PR.
-        # We checked out PR into `tmp` and run script from main branch.
-        run: cd tmp && python ../.github/sc4pac-yaml-schema.py src/yaml
-      - name: Check sc4pac yaml schema (push)
-        if: ${{ github.event_name != 'pull_request_target' }}
-        # We are on an actual branch of the repository, so run script here in the repository.
-        run: python .github/sc4pac-yaml-schema.py src/yaml
-
-  # requires STEX_API_KEY, so job is skipped in forks
-  url-check:
-    if: ${{ github.repository == 'memo33/sc4pac' }}
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          # to allow diff of other commit
-          fetch-depth: 0
-          persist-credentials: false
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-      - name: Install dependencies
-        run: python -m pip install --upgrade PyYAML jsonschema python-dateutil
-      - name: Checkout pull request
-        if: ${{ github.event_name == 'pull_request_target' }}
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          # ref: "${{ github.event.pull_request.merge_commit_sha }}"
-          # As merge_commit_sha is not up-to-date due to mergeability check, we use actual PR head for now; see https://github.com/actions/checkout/issues/518#issuecomment-1757453837
-          # ref: ${{ github.event.pull_request.head.sha }}
-          # (This merge might correspond to a newer commit than the one that triggered this workflow, in case the PR was updated in the meantime -> ok)
-          ref: ${{ format('refs/pull/{0}/merge', github.event.pull_request.number) }}
-          path: tmp
-          sparse-checkout: |
-            src/yaml
-      - name: Check STEX URLs (pull_request_target)
-        if: ${{ github.event_name == 'pull_request_target' }}
-        env:
-          STEX_API_KEY: ${{ secrets.STEX_API_KEY }}
-        # We checked out PR into `tmp` and run script from main branch.
-        run: cd tmp && git diff --no-index --name-only ../src/yaml src/yaml | xargs --delimiter '\n' python ../.github/st-check-updates.py --mode=id
-      - name: Check STEX URLs (push)
-        if: ${{ github.event_name != 'pull_request_target' }}
-        env:
-          STEX_API_KEY: ${{ secrets.STEX_API_KEY }}
-          PREVIOUS_COMMIT: ${{ github.event.before }}
-        # We are on an actual branch of the repository, so run script here in the repository.
-        # TODO This is not perfect yet, as `before` does not exist on new branches or forced pushes.
-        run: git diff --name-only "$PREVIOUS_COMMIT" -- src/yaml | xargs --delimiter '\n' python .github/st-check-updates.py --mode=id
-
-  deploy:
-    needs: lint  # url-check is not needed as ST is flaky
-    if: ${{ github.repository == 'memo33/sc4pac' && github.ref == 'refs/heads/main' && github.event_name != 'pull_request_target' }}
-    environment:
-      name: github-pages
-      url: ${{ steps.deployment.outputs.page_url }}
-    runs-on: ubuntu-latest
-    # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
-    # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
-    concurrency:
-      group: "pages"
-      cancel-in-progress: false
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-          submodules: recursive
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          java-version: '17'
-          distribution: 'temurin'
-          cache: 'sbt'
-      - uses: sbt/setup-sbt@v1
-      - name: Build sc4pac executable
-        run: cd sc4pac-tools && sbt assembly && ./sc4pac --version
-      - name: Build channel and website
-        run: make LABEL=Main gh-pages-no-lint
-      - name: Setup Pages
-        uses: actions/configure-pages@v4
-      - name: Upload artifact
-        # note that this action dereferences our `latest` symlinks, but that's not a huge problem, it just duplicates each json file
-        uses: actions/upload-pages-artifact@v3
-        with:
-          path: "gh-pages"
-      - name: Deploy to GitHub Pages
-        id: deployment
-        uses: actions/deploy-pages@v4
+  channel:
+    uses: memo33/sc4pac-actions/.github/workflows/sc4pac-channel.yaml@main
+    with:
+      path: src/yaml
+      channel-label: Main
+      deploy-repository: memo33/sc4pac
+      sc4pac-tools-submodule: sc4pac-tools
+      use-stex-api: true
+    secrets:
+      stex-api-key: ${{ secrets.STEX_API_KEY }}
     permissions:
-      pages: write
-      id-token: write
+      pages: write  # to deploy to GitHub Pages
+      id-token: write  # to verify the deployment originates from an appropriate source
diff --git a/Makefile b/Makefile
index 4043b1b2..8e5413e3 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,9 @@ SC4PAC=./sc4pac-tools/sc4pac
 # LABEL=Main
 LABEL=Main-local
 
+# assumes you have checked out sc4pac-actions in the same parent folder
+ACTIONS=../sc4pac-actions
+
 # Rebuild all .json files, the main.js file and update the gh-pages branch.
 #
 # This assumes that you have initialized the submodule `sc4pac-tools` with:
@@ -23,10 +26,9 @@ gh-pages: lint gh-pages-no-lint
 gh-pages-no-lint:
 	rm -rf ./gh-pages/
 	$(MAKE) channel
-	cd ./sc4pac-tools/ && sbt web/fullLinkJS
-	cp -p ./sc4pac-tools/web/target/scala-3.4.2/sc4pac-web-opt/main.js ./gh-pages/channel/
-	cp -p ./sc4pac-tools/web/channel/styles.css ./sc4pac-tools/web/channel/index.html ./gh-pages/channel/
-	cp -p ./docs/index.html ./docs/*.md ./docs/.nojekyll ./gh-pages/
+	cd ./sc4pac-tools/ && ./src/scripts/build-channel-page.sh
+	cp -p ./sc4pac-tools/web/target/website/channel/* ./gh-pages/channel/
+	cp -pr ./docs/. ./gh-pages/
 
 channel:
 	$(SC4PAC) channel build --label $(LABEL) --metadata-source-url https://github.com/memo33/sc4pac/blob/main/src/yaml/ --output ./gh-pages/channel/ ./src/yaml/
@@ -40,16 +42,18 @@ host-docs:
 	cd ./docs/ && python -m http.server 8091
 
 lint:
-	python .github/sc4pac-yaml-schema.py src/yaml
+	python $(ACTIONS)/src/lint.py src/yaml
 
 sc4e-check-updates:
 	python .github/sc4e-check-updates.py src/yaml
 
 # First reads in the STEX_API_KEY from a file into an environment variable and then checks for asset updates using the authenticated STEX API.
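 # ("set -a" marks the sourced variables for export, so the python process sees STEX_API_KEY.)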
 st-check-updates:
-	set -a && source ./.git/sc4pac-stex-api-key && set +a && python .github/st-check-updates.py src/yaml
+	set -a && source ./.git/sc4pac-stex-api-key && set +a && python $(ACTIONS)/src/st-check-updates.py src/yaml
 
 st-url-check:
-	set -a && source ./.git/sc4pac-stex-api-key && set +a && sh .github/url-check.sh origin/main src/yaml
+	set -a && source ./.git/sc4pac-stex-api-key && set +a \
+		&& git diff "$(shell git merge-base @ "origin/main")" --name-only -- "src/yaml" \
+		| xargs --delimiter '\n' python $(ACTIONS)/src/st-check-updates.py --mode=id
 
 .PHONY: gh-pages gh-pages-no-lint channel host host-docs lint sc4e-check-updates st-check-updates st-url-check
diff --git a/docs/.nojekyll b/docs/.nojekyll
deleted file mode 100644
index e69de29b..00000000
diff --git a/docs/metadata.md b/docs/metadata.md
index f622882d..7abf1c8e 100644
--- a/docs/metadata.md
+++ b/docs/metadata.md
@@ -176,7 +176,7 @@
 These names are prefixed with 3-digit numbers to control load order.
 
 List of subfolders currently in use:
 
-[list-of-subfolders](https://raw.githubusercontent.com/memo33/sc4pac/main/.github/sc4pac-yaml-schema.py ':include :type=code "" :fragment=subfolders-docsify')
+[list-of-subfolders](https://raw.githubusercontent.com/memo33/sc4pac-actions/main/src/lint.py ':include :type=code "" :fragment=subfolders-docsify')
 
 
diff --git a/lint-config.yaml b/lint-config.yaml
new file mode 100644
index 00000000..bae787e2
--- /dev/null
+++ b/lint-config.yaml
@@ -0,0 +1,17 @@
+subfolders: []
+
+extra-channels: []
+
+allow-ego-perspective: false
+
+# Package names for which the check for asset version mismatches should be skipped.
+# Only needed when the linter tells you so.
+ignore-version-mismatches:
+- vortext:vortexture-1
+- vortext:vortexture-2
+- t-wrecks:industrial-revolution-mod-addon-set-i-d
+- memo:industrial-revolution-mod
+- bsc:mega-props-jrj-vol01
+- bsc:mega-props-diggis-canals-streams-and-ponds
+- bsc:mega-props-rubik3-vol01-wtc-props
+- bsc:mega-props-newmaninc-rivers-and-ponds
diff --git a/sc4pac-tools b/sc4pac-tools
index aa8c16ae..5050a534 160000
--- a/sc4pac-tools
+++ b/sc4pac-tools
@@ -1 +1 @@
-Subproject commit aa8c16ae789a6ff6e2fc018f384878e2ad6c66e8
+Subproject commit 5050a5343ad513d58e1ad385e58ea46d33742bd1
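For reference, a minimal sketch of how the new per-channel lint-config.yaml might be consumed by the migrated linter, replacing the constants previously hardcoded in .github/sc4pac-yaml-schema.py. The field names are taken from lint-config.yaml above; the loading logic itself is an assumption, as the actual handling lives in sc4pac-actions/src/lint.py:

#!/usr/bin/env python3
# Illustrative sketch only: read repository-specific lint settings.
import yaml

with open("lint-config.yaml", encoding='utf-8') as f:
    config = yaml.safe_load(f) or {}

# Presumably, empty lists fall back to the linter's built-in defaults
# (e.g. the standard list of subfolders).
subfolders = config.get("subfolders") or []
extra_channels = config.get("extra-channels") or []
allow_ego_perspective = config.get("allow-ego-perspective", False)
ignore_version_mismatches = set(config.get("ignore-version-mismatches") or [])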