Skip to content

Commit

Permalink
Merge pull request #415 from dbnicholson/flake8
Browse files Browse the repository at this point in the history
Run flake8 on CI
  • Loading branch information
wjt authored Apr 30, 2024
2 parents 64c4b7f + a9a32e1 commit ac28f16
Show file tree
Hide file tree
Showing 26 changed files with 196 additions and 146 deletions.
4 changes: 4 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,7 @@
# <https://github.com/python/black#line-length>, defaults to 88, based on some
# empirical research on reducing annoying line wrapping on real source code.
max-line-length = 88

# Black disagrees with some rules.
# E203 - Whitespace before ':'
extend-ignore = E203
4 changes: 4 additions & 0 deletions .github/workflows/run-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,10 @@ jobs:
run: |
python3 -m black --check --diff .
- name: Run flake8
run: |
python3 -m flake8
check-typing:
name: Static type check
runs-on: ubuntu-latest
Expand Down
1 change: 1 addition & 0 deletions dependencies.apt.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ python3-brotli
python3-cairo
python3-chardet
python3-editorconfig
python3-flake8
python3-gi
python3-github
python3-jsonschema
Expand Down
15 changes: 8 additions & 7 deletions src/checkers/debianrepochecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,9 +163,10 @@ async def check(self, external_data: ExternalBase):

new_version = ExternalFile(
url=candidate.uri,
# FIXME: apt.package.Version.{md5,sha1,sha256} can raise an exception
# if given hash isn't set, while sha512 isn't accessible at all.
# Raw hashes are handy, but accessible only through protected property.
# FIXME: apt.package.Version.{md5,sha1,sha256} can raise an
# exception if given hash isn't set, while sha512 isn't accessible
# at all. Raw hashes are handy, but accessible only through
# protected property.
checksum=read_deb_hashes(candidate._records.hashes),
size=candidate.size,
version=candidate.version,
Expand All @@ -180,10 +181,10 @@ def _translate_arch(self, arch: str) -> str:
return arches.get(arch, arch)

async def _get_timestamp_for_candidate(self, candidate: apt.Version):
# TODO: fetch package, parse changelog, get the date from there.
# python-apt can fetch changelogs from Debian and Ubuntu's changelog
# server, but most packages this checker will be used for are not from these repos.
# We'd have to open-code it.
# TODO: fetch package, parse changelog, get the date from there. python-apt can
# fetch changelogs from Debian and Ubuntu's changelog server, but most packages
# this checker will be used for are not from these repos. We'd have to open-code
# it.
# https://salsa.debian.org/apt-team/python-apt/blob/master/apt/package.py#L1245-1417
assert candidate.uri
return await get_timestamp_from_url(candidate.uri, self.session)
Expand Down
3 changes: 2 additions & 1 deletion src/checkers/gitchecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,8 @@ async def _check_has_new(external_data: ExternalGitRepo):
latest_tag = sorted_tags[-1]
except IndexError as err:
raise CheckerQueryError(
f"{external_data.current_version.url} has no tags matching '{tag_pattern}'"
f"{external_data.current_version.url} has no tags matching "
f"'{tag_pattern}'"
) from err

new_version = ExternalGitRef(
Expand Down
3 changes: 2 additions & 1 deletion src/checkers/gnomechecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,8 @@ async def check(self, external_data: ExternalBase):
proj_url = GNOME_MIRROR / "sources" / project_name
try:
async with self.session.get(proj_url / "cache.json") as cache_resp:
# Some mirrors may send invalid content-type; don't require it to be application/json
# Some mirrors may send invalid content-type; don't require it to be
# application/json
cache_json = await cache_resp.json(content_type=None)
except NETWORK_ERRORS as err:
raise CheckerQueryError from err
Expand Down
10 changes: 6 additions & 4 deletions src/checkers/htmlchecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,9 @@ async def _get_text(self, url: t.Union[URL, str]) -> str:
try:
async with self.session.get(url) as response:
encoding = await self._get_encoding(response)
# We use streaming decoding in order to get decode error and abort the check
# as early as possible, without preloading the whole raw contents into memory
# We use streaming decoding in order to get decode error and abort the
# check as early as possible, without preloading the whole raw contents
# into memory
decoder_cls = codecs.getincrementaldecoder(encoding)
decoder = decoder_cls(errors="strict")
with io.StringIO() as buf:
Expand Down Expand Up @@ -184,8 +185,9 @@ def _get_latest(pattern: re.Pattern, ver_group: int) -> re.Match:
)

try:
# NOTE Returning last match when sort is requested and first match otherwise
# doesn't seem sensible, but we need to retain backward compatibility
# NOTE Returning last match when sort is requested and first match
# otherwise doesn't seem sensible, but we need to retain backward
# compatibility
result = matches[-1 if sort_matches else 0]
except IndexError as err:
raise CheckerQueryError(
Expand Down
7 changes: 4 additions & 3 deletions src/lib/externaldata.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,10 @@ class State(IntFlag):
checker_data: t.Dict[str, t.Any]
module: t.Optional[BuilderModule]
parent: t.Optional[BuilderSource] = dataclasses.field(init=False, default=None)
# fmt: off
checked: asyncio.Event = dataclasses.field(init=False, default_factory=asyncio.Event)
# fmt: on
checked: asyncio.Event = dataclasses.field(
init=False,
default_factory=asyncio.Event,
)

@classmethod
def __init_subclass__(cls, *args, **kwargs):
Expand Down
14 changes: 8 additions & 6 deletions src/lib/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,11 +160,13 @@ def content_type_rejected(content_type: t.Optional[str]) -> bool:
async for chunk in response.content.iter_chunked(HTTP_CHUNK_SIZE):
if first_chunk:
first_chunk = False
# determine content type from magic number since http header may be wrong
# determine content type from magic number since http header may be
# wrong
actual_content_type = magic.from_buffer(chunk, mime=True)
if content_type_rejected(actual_content_type):
raise CheckerFetchError(
f"Wrong content type '{actual_content_type}' received from '{url}'"
f"Wrong content type '{actual_content_type}' received "
f"from '{url}'"
)

checksum.update(chunk)
Expand Down Expand Up @@ -222,8 +224,7 @@ def __ne__(self, other):


class _SupportsComparison(t.Protocol):
def __lt__(self, other: t.Any) -> bool:
...
def __lt__(self, other: t.Any) -> bool: ...


_VersionedObj = t.TypeVar("_VersionedObj")
Expand Down Expand Up @@ -430,7 +431,7 @@ async def git_ls_remote(url: str) -> t.Dict[str, str]:
raise CheckerQueryError("Listing Git remote failed") from err
git_stdout = git_stdout_raw.decode()

return {r: c for c, r in (l.split() for l in git_stdout.splitlines())}
return {r: c for c, r in (line.split() for line in git_stdout.splitlines())}


async def extract_appimage_version(appimg_io: t.IO):
Expand Down Expand Up @@ -550,7 +551,8 @@ def dump_manifest(contents: t.Dict, manifest_path: t.Union[Path, str]):
# Determine max line length preference
if max_line_length := conf.get("max_line_length"):
try:
_yaml.width = int(max_line_length) # type: ignore # See https://sourceforge.net/p/ruamel-yaml/tickets/322/
# See https://sourceforge.net/p/ruamel-yaml/tickets/322/
_yaml.width = int(max_line_length) # type: ignore
except ValueError:
log.warning("Ignoring invalid max_line_length %r", max_line_length)

Expand Down
10 changes: 7 additions & 3 deletions src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,9 +435,13 @@ def parse_cli_args(cli_args=None):
)
parser.add_argument(
"--require-important-update",
help="Require an update to at least one source with is-important or is-main-source to save changes to the manifest. "
"If no instances of is-important or is-main-source are found, assume normal behaviour and always save changes to the manifest. "
"This is useful to avoid PRs generated to update a singular unimportant source.",
help=(
"Require an update to at least one source with is-important or "
"is-main-source to save changes to the manifest. If no instances of "
"is-important or is-main-source are found, assume normal behaviour and "
"always save changes to the manifest. This is useful to avoid PRs "
"generated to update a singular unimportant source."
),
action="store_true",
)

Expand Down
10 changes: 5 additions & 5 deletions src/manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,10 +510,10 @@ def _update_appdata(self):
log.debug("Version didn't change, not adding release")

def update_manifests(self) -> t.List[str]:
"""
Updates references to external data in manifests.
If require_important_update is True, only update the manifest
if at least one source with IMPORTANT_SRC_PROP or MAIN_SRC_PROP received an update.
"""Updates references to external data in manifests.
If require_important_update is True, only update the manifest if at least one
source with IMPORTANT_SRC_PROP or MAIN_SRC_PROP received an update.
"""
# We want a list, without duplicates; Python provides an
# insertion-order-preserving dictionary so we use that.
Expand All @@ -526,7 +526,7 @@ def update_manifests(self) -> t.List[str]:
for data in self.get_external_data():
important = data.checker_data.get(IMPORTANT_SRC_PROP)
main = data.checker_data.get(MAIN_SRC_PROP)
if important or (main and important != False):
if important or (main and important is not False):
log.debug("Found an important source: %s", data)

found_important_update = data.has_version_changed
Expand Down
8 changes: 4 additions & 4 deletions tests/org.chromium.Chromium.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@ modules:
- name: chromium
sources:
- type: archive
url: https://commondatastorage.googleapis.com/chromium-browser-official/chromium-90.0.4430.212.tar.xz
sha256: abe11d0cb1ff21278aad2eec1a1e279d59176b15331804d7df1807446786d59e
url: https://commondatastorage.googleapis.com/chromium-browser-official/chromium-100.0.4845.0.tar.xz
sha256: a68d31f77a6b7700a5161d82f5932c2822f85f7ae68ad51be3d3cf689a3fe2b0
x-checker-data:
type: chromium
component: chromium
is-main-source: true
- type: archive
url: https://commondatastorage.googleapis.com/chromium-browser-clang/Linux_x64/clang-llvmorg-13-init-1559-g01b87444-3.tgz
sha256: 676448e180fb060d3983f24476a2136eac83c6011c600117686035634a2bbe26
url: https://commondatastorage.googleapis.com/chromium-browser-clang/Linux_x64/clang-libs-llvmorg-14-init-12246-g7787a8f1-2.tgz
sha256: cf6b516a4e410d79439a150927fc8b450b325e2a6349395ae153c9d2dd6c6ed2
x-checker-data:
type: chromium
component: llvm-prebuilt
Expand Down
12 changes: 6 additions & 6 deletions tests/test_anityachecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ async def test_check(self):
self.assertIsInstance(data.new_version, ExternalFile)
self.assertRegex(
data.new_version.url,
r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$",
r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$", # noqa: E501
)
self.assertIsNotNone(data.new_version.version)
self.assertGreater(
Expand All @@ -38,15 +38,15 @@ async def test_check(self):
self.assertNotEqual(
data.new_version.checksum,
MultiDigest(
sha256="1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b",
sha256="1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b", # noqa: E501
),
)
elif data.filename == "boost_1_74_0.tar.bz2":
self.assertIsNotNone(data.new_version)
self.assertIsInstance(data.new_version, ExternalFile)
self.assertRegex(
data.new_version.url,
r"^https://boostorg\.jfrog\.io/artifactory/main/release/[\d.]+/source/boost_[\d]+_[\d]+_[\d]+.tar.bz2$",
r"^https://boostorg\.jfrog\.io/artifactory/main/release/[\d.]+/source/boost_[\d]+_[\d]+_[\d]+.tar.bz2$", # noqa: E501
)
self.assertIsNotNone(data.new_version.version)
self.assertGreater(
Expand All @@ -59,15 +59,15 @@ async def test_check(self):
self.assertNotEqual(
data.new_version.checksum,
MultiDigest(
sha256="83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1"
sha256="83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1" # noqa: E501
),
)
elif data.filename == "flatpak-1.8.2.tar.xz":
self.assertIsNotNone(data.new_version)
self.assertIsInstance(data.new_version, ExternalFile)
self.assertRegex(
data.new_version.url,
r"^https://github.com/flatpak/flatpak/releases/download/[\w\d.]+/flatpak-[\w\d.]+.tar.xz$",
r"^https://github.com/flatpak/flatpak/releases/download/[\w\d.]+/flatpak-[\w\d.]+.tar.xz$", # noqa: E501
)
self.assertIsNotNone(data.new_version.version)
self.assertEqual(
Expand All @@ -80,7 +80,7 @@ async def test_check(self):
self.assertNotEqual(
data.new_version.checksum,
MultiDigest(
sha256="7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0"
sha256="7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0" # noqa: E501
),
)
elif data.filename == "ostree.git":
Expand Down
Loading

0 comments on commit ac28f16

Please sign in to comment.