Apply more ruff rules
jl-wynen committed Apr 26, 2024
1 parent 8e54bd1 commit dbd8278
Showing 13 changed files with 59 additions and 45 deletions.
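The diff below groups into a handful of recurring fixes: set and dict literals in place of set()/dict() calls, an explicit strict=True for zip over paired lists, and timezone-aware datetime values in the tests; the exact ruff rule selection enabled for this is not part of the commit.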
2 changes: 1 addition & 1 deletion docs/user-guide/testing.ipynb
@@ -482,7 +482,7 @@
" if real_client is None:\n",
" pytest.skip(\"Backend tests disabled\")\n",
" # or do something else\n",
" \n",
"\n",
" # do the actual tests"
]
},
6 changes: 3 additions & 3 deletions src/scitacean/dataset.py
@@ -432,7 +432,7 @@ def _get_or_add_orig_datablock(self, key: int | str | PID) -> OrigDatablock:

def make_upload_model(self) -> UploadDerivedDataset | UploadRawDataset:
"""Construct a SciCat upload model from self."""
model: type[UploadRawDataset] | type[UploadDerivedDataset] = (
model: type[UploadRawDataset | UploadDerivedDataset] = (
UploadRawDataset if self.type == DatasetType.RAW else UploadDerivedDataset
)
# Datablocks are not included here because they are handled separately
@@ -517,8 +517,8 @@ def keys(self) -> Iterable[str]:
"""
from itertools import chain

all_fields = set(field.name for field in self.fields())
my_fields = set(field.name for field in self.fields(dataset_type=self.type))
all_fields = {field.name for field in self.fields()}
my_fields = {field.name for field in self.fields(dataset_type=self.type)}
other_fields = all_fields - my_fields
invalid_fields = (
f_name for f_name in other_fields if getattr(self, f_name) is not None
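The keys() change replaces set(generator) with a set comprehension, the form preferred by ruff's flake8-comprehensions checks (likely C401; the enabled rule list is not shown here), and the annotation in make_upload_model folds the union into a single type[...], which describes the same pair of classes. A minimal, self-contained sketch of the set equivalence, using a hypothetical dataclass rather than scitacean's field spec:

from dataclasses import dataclass, fields

@dataclass
class Example:
    pid: str = ""
    owner: str = ""

via_generator = set(f.name for f in fields(Example))   # flagged: generator passed to set()
via_comprehension = {f.name for f in fields(Example)}  # preferred comprehension form

assert via_generator == via_comprehension == {"pid", "owner"}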
14 changes: 7 additions & 7 deletions src/scitacean/file.py
@@ -379,13 +379,13 @@ def uploaded(
"""
if remote_creation_time is None:
remote_creation_time = datetime.now().astimezone(timezone.utc)
args = dict(
remote_path=RemotePath(remote_path) if remote_path is not None else None,
remote_gid=remote_gid,
remote_uid=remote_uid,
remote_perm=remote_perm,
_remote_creation_time=remote_creation_time,
)
args = {
"remote_path": RemotePath(remote_path) if remote_path is not None else None,
"remote_gid": remote_gid,
"remote_uid": remote_uid,
"remote_perm": remote_perm,
"_remote_creation_time": remote_creation_time,
}
return dataclasses.replace(
self,
_remote_size=remote_size if remote_size is not None else self.size,
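Here dict(...) with keyword arguments becomes a dict literal, which flake8-comprehensions also prefers (likely C408). A small sketch of the equivalence with made-up values:

args_call = dict(remote_gid="1000", remote_uid="1000", remote_perm="644")
args_literal = {"remote_gid": "1000", "remote_uid": "1000", "remote_perm": "644"}

assert args_call == args_literal  # identical mapping; the literal skips the dict() call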
2 changes: 1 addition & 1 deletion src/scitacean/testing/sftp/_sftp.py
@@ -61,7 +61,7 @@ def _seed_files() -> Iterable[tuple[str, str]]:
def local_access() -> SFTPAccess:
config = _docker_compose_file()
service = config["services"]["scitacean-test-sftp-server"]
env = {k: v for k, v in map(lambda s: s.split("="), service["environment"])}
env = dict(map(lambda s: s.split("="), service["environment"]))
return SFTPAccess(
host="localhost",
port=service["ports"][0].split(":")[0],
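The comprehension above only unpacked key-value pairs without changing them, so passing the split pairs straight to dict() is equivalent (likely flake8-comprehensions C416). A sketch with a made-up environment list:

environment = ["USER=test", "PORT=2222"]

as_comprehension = {k: v for k, v in map(lambda s: s.split("="), environment)}
as_dict_call = dict(map(lambda s: s.split("="), environment))

assert as_comprehension == as_dict_call == {"USER": "test", "PORT": "2222"}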
2 changes: 1 addition & 1 deletion src/scitacean/testing/transfer.py
@@ -37,7 +37,7 @@ def download_file(self, *, remote: RemotePath, local: Path) -> None:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download multiple files."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)


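This module and the two transfer backends below gain strict=True on zip. Without it, zip stops silently at the shorter input; with strict=True (Python 3.10+) a length mismatch raises instead, which is what flake8-bugbear's B905 likely nudges towards here. A standalone sketch:

remote = ["a.dat", "b.dat", "c.dat"]
local = ["a.dat", "b.dat"]

assert len(list(zip(remote, local))) == 2  # the third remote path is silently dropped

try:
    list(zip(remote, local, strict=True))
except ValueError as err:
    print(err)  # e.g. "zip() argument 2 is shorter than argument 1"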
2 changes: 1 addition & 1 deletion src/scitacean/transfer/link.py
@@ -22,7 +22,7 @@ class LinkDownloadConnection:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download files from the given remote path."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)

def download_file(self, *, remote: RemotePath, local: Path) -> None:
2 changes: 1 addition & 1 deletion src/scitacean/transfer/sftp.py
@@ -31,7 +31,7 @@ def __init__(self, *, sftp_client: SFTPClient, host: str) -> None:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download files from the given remote path."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)

def download_file(self, *, remote: RemotePath, local: Path) -> None:
8 changes: 6 additions & 2 deletions tests/client/dataset_client_test.py
@@ -83,7 +83,9 @@ def test_get_dataset(client):
assert downloaded.meta["temperature"] == dset.scientificMetadata["temperature"]
assert downloaded.meta["data_type"] == dset.scientificMetadata["data_type"]

for dset_file, expected_file in zip(downloaded.files, dblock.dataFileList):
for dset_file, expected_file in zip(
downloaded.files, dblock.dataFileList, strict=True
):
assert dset_file.local_path is None
assert dset_file.size == expected_file.size
assert dset_file.creation_time == expected_file.time
@@ -100,7 +102,9 @@ def test_can_get_public_dataset_without_login(require_scicat_backend, scicat_acc
assert downloaded.creation_time == dset.creationTime
assert downloaded.access_groups == dset.accessGroups

for dset_file, expected_file in zip(downloaded.files, dblock.dataFileList):
for dset_file, expected_file in zip(
downloaded.files, dblock.dataFileList, strict=True
):
assert dset_file.local_path is None
assert dset_file.size == expected_file.size
assert dset_file.creation_time == expected_file.time
12 changes: 6 additions & 6 deletions tests/common/files.py
@@ -25,9 +25,9 @@ def make_file(
# and avoids potential difficulties of querying the file system.
creation_time = datetime.now().astimezone(timezone.utc)

return dict(
path=path,
creation_time=creation_time,
checksum=checksum_digest,
size=len(contents),
)
return {
"path": path,
"creation_time": creation_time,
"checksum": checksum_digest,
"size": len(contents),
}
34 changes: 22 additions & 12 deletions tests/dataset_test.py
@@ -333,7 +333,11 @@ def test_make_scicat_models_datablock_without_files(dataset):
@settings(max_examples=10)
def test_make_scicat_models_datablock_with_one_file(dataset):
file_model = model.DownloadDataFile(
path="path", size=6163, chk="8450ac0", gid="group", time=datetime.now()
path="path",
size=6163,
chk="8450ac0",
gid="group",
time=datetime.now(tz=timezone.utc),
)
dataset.add_files(File.from_download_model(local_path=None, model=file_model))

@@ -438,7 +442,9 @@ def test_eq_self(dset):
dset.add_files(
File.from_download_model(
local_path=None,
model=model.DownloadDataFile(path="path", size=94571, time=datetime.now()),
model=model.DownloadDataFile(
path="path", size=94571, time=datetime.now(tz=timezone.utc)
),
)
)
dset.attachments.append(
@@ -481,14 +487,16 @@ def test_neq_single_mismatched_file(initial):
File.from_download_model(
local_path=None,
model=model.DownloadDataFile(
path="path", size=51553312, time=datetime.now()
path="path", size=51553312, time=datetime.now(tz=timezone.utc)
),
)
)
initial.add_files(
File.from_download_model(
local_path=None,
model=model.DownloadDataFile(path="path", size=94571, time=datetime.now()),
model=model.DownloadDataFile(
path="path", size=94571, time=datetime.now(tz=timezone.utc)
),
)
)
assert modified != initial
@@ -503,7 +511,7 @@ def test_neq_extra_file(initial):
File.from_download_model(
local_path="/local",
model=model.DownloadDataFile(
path="path", size=51553312, time=datetime.now()
path="path", size=51553312, time=datetime.now(tz=timezone.utc)
),
)
)
@@ -638,7 +646,9 @@ def test_replace_does_not_change_files_no_input_files(initial):
def test_replace_does_not_change_files_with_input_files(initial):
file = File.from_download_model(
local_path=None,
model=model.DownloadDataFile(path="path", size=6163, time=datetime.now()),
model=model.DownloadDataFile(
path="path", size=6163, time=datetime.now(tz=timezone.utc)
),
)
initial.add_files(file)
replaced = initial.replace(owner="a-new-owner")
@@ -808,9 +818,9 @@ def invalid_field_example(my_type):
@given(initial=sst.datasets(for_upload=True))
@settings(max_examples=10)
def test_dataset_dict_like_keys_per_type(initial: Dataset) -> None:
my_names = set(
my_names = {
field.name for field in Dataset._FIELD_SPEC if field.used_by(initial.type)
)
}
assert set(initial.keys()) == my_names


@@ -819,9 +829,9 @@ def test_dataset_dict_like_keys_including_invalid_field(initial):
def test_dataset_dict_like_keys_including_invalid_field(initial):
invalid_name, invalid_value = invalid_field_example(initial.type)

my_names = set(
my_names = {
field.name for field in Dataset._FIELD_SPEC if field.used_by(initial.type)
)
}
assert invalid_name not in my_names
my_names.add(invalid_name)

@@ -833,15 +843,15 @@ def test_dataset_dict_like_values(initial: Dataset) -> None:
@given(initial=sst.datasets(for_upload=True))
@settings(max_examples=10)
def test_dataset_dict_like_values(initial: Dataset) -> None:
for key, value in zip(initial.keys(), initial.values()):
for key, value in zip(initial.keys(), initial.values(), strict=True):
assert value == getattr(initial, key)


@given(initial=sst.datasets(for_upload=True))
@settings(max_examples=10)
def test_dataset_dict_like_values_with_invalid_field(initial: Dataset) -> None:
setattr(initial, *invalid_field_example(initial.type))
for key, value in zip(initial.keys(), initial.values()):
for key, value in zip(initial.keys(), initial.values(), strict=True):
assert value == getattr(initial, key)


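The test updates in this file and in the transfer tests below make every datetime timezone-aware, both for now() and for the literal constructor, in line with flake8-datetimez (likely DTZ005 and DTZ001). A minimal sketch of the difference:

from datetime import datetime, timezone

naive = datetime.now()                                    # no tzinfo attached
aware_now = datetime.now(tz=timezone.utc)                 # timezone-aware "now"
aware_fixed = datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc)

assert naive.tzinfo is None
assert aware_now.tzinfo is timezone.utc
assert aware_fixed.tzinfo is timezone.utc
# Mixing naive and aware values in comparisons raises TypeError, so tests pick one style.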
14 changes: 7 additions & 7 deletions tests/transfer/link_test.py
@@ -3,7 +3,7 @@

import hashlib
import sys
from datetime import datetime
from datetime import datetime, timezone

import pytest

@@ -75,7 +75,7 @@ def test_client_with_link(tmp_path):
accessGroups=["group1"],
contactEmail="[email protected]",
creationLocation="UU",
creationTime=datetime(2023, 6, 23, 10, 0, 0),
creationTime=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
numberOfFiles=1,
numberOfFilesArchived=0,
owner="PonderStibbons",
@@ -92,7 +92,7 @@ def test_client_with_link(tmp_path):
path="file1.txt",
size=len(content),
chk=checksum,
time=datetime(2023, 6, 23, 10, 0, 0),
time=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
)
],
datasetId=ds.pid,
@@ -134,7 +134,7 @@ def test_client_with_link_local_file_exists(tmp_path):
accessGroups=["group1"],
contactEmail="[email protected]",
creationLocation="UU",
creationTime=datetime(2023, 6, 23, 10, 0, 0),
creationTime=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
numberOfFiles=1,
numberOfFilesArchived=0,
owner="PonderStibbons",
@@ -151,7 +151,7 @@ def test_client_with_link_local_file_exists(tmp_path):
path="file1.txt",
size=len(content),
chk=checksum,
time=datetime(2023, 6, 23, 10, 0, 0),
time=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
)
],
datasetId=ds.pid,
@@ -195,7 +195,7 @@ def test_client_with_link_local_file_exists_clashing_content(tmp_path):
accessGroups=["group1"],
contactEmail="[email protected]",
creationLocation="UU",
creationTime=datetime(2023, 6, 23, 10, 0, 0),
creationTime=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
numberOfFiles=1,
numberOfFilesArchived=0,
owner="PonderStibbons",
@@ -212,7 +212,7 @@ def test_client_with_link_local_file_exists_clashing_content(tmp_path):
path="file1.txt",
size=len(content),
chk=checksum,
time=datetime(2023, 6, 23, 10, 0, 0),
time=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
)
],
datasetId=ds.pid,
2 changes: 1 addition & 1 deletion tests/transfer/sftp_test.py
@@ -401,7 +401,7 @@ def test_client_with_sftp(
access_groups=["group1"],
contact_email="[email protected]",
creation_location="UU",
creation_time=datetime(2023, 6, 23, 10, 0, 0),
creation_time=datetime(2023, 6, 23, 10, 0, 0, tzinfo=timezone.utc),
owner="PonderStibbons",
owner_group="uu",
principal_investigator="MustrumRidcully",
4 changes: 2 additions & 2 deletions tests/upload_test.py
@@ -171,10 +171,10 @@ def test_upload_with_only_remote_files_does_not_need_file_transfer(dataset):


def test_upload_with_both_remote_and_local_files(client, dataset_with_files):
original_file_names = set(
original_file_names = {
dataset_with_files.source_folder / file.remote_path
for file in dataset_with_files.files
)
}
dataset_with_files.add_files(
File.from_remote(
remote_path="file1.h5", size=6123, creation_time="2019-09-09T19:29:39Z"
