Python310 support (#241)
* updating README and docs [skip ci]

* chore: attempt 3.10 support

* ci: include 3.10 workflow

* ci: disable clone protection

* ci: mitigate mypy failure

* chore: start fixing mypy errors
jannisborn authored May 23, 2024
1 parent 5373f54 commit 238125a
Showing 10 changed files with 39 additions and 26 deletions.
1 change: 1 addition & 0 deletions .github/workflows/pypi.yaml
@@ -16,6 +16,7 @@ jobs:
       max-parallel: 3
       matrix:
         python-version:
+          - 3.10
           - 3.8
           - 3.7
         os:
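A side note on the new matrix entry (an observation, not part of the commit): YAML 1.1 parsers read an unquoted 3.10 as the float 3.1, which is why python-version entries are commonly written as the quoted string "3.10". A quick PyYAML check of the difference:

    import yaml  # PyYAML

    # Unquoted 3.10 collapses to the float 3.1; the quoted form stays a string.
    print(yaml.safe_load("python-version:\n  - 3.10\n  - '3.10'"))
    # {'python-version': [3.1, '3.10']}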
4 changes: 3 additions & 1 deletion .github/workflows/tests.yaml
@@ -19,6 +19,8 @@ jobs:
       defaults:
         run:
           shell: bash -l {0} # for conda command
+      env:
+        GIT_CLONE_PROTECTION_ACTIVE: false
       steps:
       - uses: actions/checkout@v2
       - uses: conda-incubator/setup-miniconda@v2
@@ -48,7 +50,7 @@ jobs:
       - name: Check mypy
         run: |
           conda activate gt4sd
-          python -m mypy src/gt4sd
+          python -m mypy src/gt4sd --show-traceback
       - name: Run pytests
         run: |
           conda activate gt4sd
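For context, mypy's --show-traceback flag prints a full traceback when mypy itself crashes with an internal error, which makes CI failures easier to diagnose. A minimal sketch of an equivalent invocation through mypy's scripting API (the path is the one used above):

    from mypy import api

    # Equivalent to `python -m mypy src/gt4sd --show-traceback`.
    stdout, stderr, exit_status = api.run(["src/gt4sd", "--show-traceback"])
    print(stdout)
    print(stderr)
    print("mypy exit status:", exit_status)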
5 changes: 3 additions & 2 deletions conda_cpu_linux.yml
@@ -5,8 +5,9 @@ channels:
   - https://conda.anaconda.org/pyg
   - https://conda.anaconda.org/conda-forge
 dependencies:
-  - python>=3.7,<3.9
-  - pip>=19.1,<20.3
+  - setuptools==69.5.1
+  - python>=3.7,<3.11
+  - pip=24.0
   - pytorch>=1.0,<=1.12.1
   - cpuonly
   - pytorch-scatter<=2.0.9=*cu102*
5 changes: 3 additions & 2 deletions conda_cpu_mac.yml
@@ -5,8 +5,9 @@ channels:
   - https://conda.anaconda.org/pyg
   - https://conda.anaconda.org/conda-forge
 dependencies:
-  - python>=3.7,<3.9
-  - pip>=19.1,<20.3
+  - setuptools==69.5.1
+  - python>=3.7,<3.11
+  - pip=24.0
   - pytorch>=1.0,<=1.12.1
   - cpuonly
   - pytorch-scatter<=2.0.9
5 changes: 3 additions & 2 deletions conda_gpu.yml
@@ -5,8 +5,9 @@ channels:
   - https://conda.anaconda.org/pyg
   - https://conda.anaconda.org/conda-forge
 dependencies:
-  - python>=3.7,<3.9
-  - pip>=19.1,<20.3
+  - setuptools==69.5.1
+  - python>=3.7,<3.11
+  - pip=24.0
   - pytorch>=1.0,<=1.12.1=*cu*
   - pytorch-scatter<=2.0.9=*cu102*
   - torchvision<=0.13.1=*cu*
8 changes: 4 additions & 4 deletions dev_requirements.txt
@@ -6,11 +6,11 @@ flask==1.1.2
 flask_login==0.5.0
 # isort==5.7.0
 licenseheaders==0.8.8
-mypy==0.950
-myst-parser==0.13.3
-pytest==6.1.1
+mypy>=1.0.0
+myst-parser==1.0.0
+pytest==6.2.5
 pytest-cov==2.10.1
-sphinx==3.4.3
+sphinx>=5
 sphinx-autodoc-typehints==1.11.1
 jinja2<3.1.0
 sphinx_rtd_theme==0.5.1
4 changes: 3 additions & 1 deletion requirements.txt
@@ -1,4 +1,5 @@
 # pypi requirements
+setuptools==69.5.1
 accelerate>=0.12,<0.20.0
 datasets>=1.11.0
 diffusers<=0.6.0
@@ -11,7 +12,7 @@ gt4sd-molformer>=0.1.1
 gt4sd-trainer-hf-pl>=0.0.2
 keras>=2.3.1,<2.11.0
 keybert>=0.7.0
-markdown-it-py<3.0.0,>=2.2.0
+markdown-it-py
 minio==7.0.1
 modlamp>=4.0.0
 molgx>=0.22.0a1
@@ -25,6 +26,7 @@ PyTDC==0.3.7
 pytorch_lightning<=1.7.7
 pyyaml>=5.4.1
 rdkit>=2022.3.5
+rdkit-stubs>=0.7
 regex>=2.5.91
 reinvent-chemistry==0.0.38
 sacremoses>=0.0.41
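The new rdkit-stubs requirement provides type stubs for RDKit, giving mypy type information for code that uses the rdkit API (the setup.cfg change below then silences errors raised inside the stubs themselves). A small illustrative snippet, not part of the commit:

    from rdkit import Chem

    # Typical RDKit call that benefits from stubs under mypy.
    mol = Chem.MolFromSmiles("CCO")  # returns None for invalid SMILES
    if mol is not None:
        print(Chem.MolToSmiles(mol))  # canonical SMILES: "CCO"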
5 changes: 4 additions & 1 deletion setup.cfg
@@ -284,4 +284,7 @@ ignore_missing_imports = True
 ignore_missing_imports = True
 
 [mypy-xgboost.*]
-ignore_missing_imports = True
+ignore_missing_imports = True
+
+[mypy-rdkit-stubs.*]
+ignore_errors = True
20 changes: 12 additions & 8 deletions src/gt4sd/algorithms/core.py
@@ -25,7 +25,7 @@
 
 from __future__ import annotations
 
-import collections
+from collections.abc import Hashable
 import logging
 import os
 import shutil
@@ -233,7 +233,7 @@ def sample(self, number_of_items: int = 100) -> Iterator[S]:
             try:
                 valid_item = self.configuration.validate_item(item)
                 # check if sample is hashable
-                if not isinstance(item, collections.Hashable):
+                if not isinstance(item, Hashable):
                     yield valid_item
                     item_set.add(str(index))
                 else:
@@ -623,9 +623,11 @@ def save_version_from_training_pipeline_arguments(
             target_version,
         )
         filepaths_mapping = {
-            filename: source_filepath
-            if os.path.exists(source_filepath)
-            else os.path.join(source_missing_path, filename)
+            filename: (
+                source_filepath
+                if os.path.exists(source_filepath)
+                else os.path.join(source_missing_path, filename)
+            )
             for filename, source_filepath in filepaths_mapping.items()
         }
         logger.info(f"Saving artifacts into {target_path}...")
@@ -713,9 +715,11 @@ def upload_version_from_training_pipeline_arguments(
 
         # mapping between filenames and paths for a version.
         filepaths_mapping = {
-            filename: source_filepath
-            if os.path.exists(source_filepath)
-            else os.path.join(source_missing_path, filename)
+            filename: (
+                source_filepath
+                if os.path.exists(source_filepath)
+                else os.path.join(source_missing_path, filename)
+            )
             for filename, source_filepath in filepaths_mapping.items()
         }
 
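Background for the import change above: the ABC aliases that collections re-exported (collections.Hashable, collections.Iterable, ...) were removed in Python 3.10, so the class must now be imported from collections.abc, which also works on 3.7 through 3.9. A minimal check of the new form:

    from collections.abc import Hashable

    # Strings and tuples are hashable; lists and dicts are not.
    assert isinstance("sample", Hashable)
    assert not isinstance([1, 2, 3], Hashable)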
8 changes: 3 additions & 5 deletions src/gt4sd/frameworks/cgcnn/data.py
@@ -31,7 +31,7 @@
 import logging
 import os
 import random
-from typing import Any, Callable, List, Tuple, Union
+from typing import Any, Callable, List, Tuple, Union, Optional
 
 import numpy as np
 import torch
@@ -49,7 +49,7 @@ def get_train_val_test_loader(
     dataset: torch.utils.data.Dataset,
     collate_fn: Callable[[List[Any]], Any] = default_collate,
     batch_size: int = 64,
-    train_ratio: float = None,
+    train_ratio: Optional[float] = None,
     val_ratio: float = 0.1,
     test_ratio: float = 0.1,
     return_test: bool = False,
@@ -241,9 +241,7 @@ def expand(self, distances: np.ndarray) -> np.ndarray:
             Expanded distance matrix with the last dimension of length
             len(self.filter).
         """
-        return np.exp(
-            -((distances[..., np.newaxis] - self.filter) ** 2) / self.var**2
-        )
+        return np.exp(-((distances[..., np.newaxis] - self.filter) ** 2) / self.var**2)
 
 
 class AtomInitializer:
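The signature change above is likely tied to the mypy upgrade: newer mypy versions no longer treat a None default as an implicit Optional, so train_ratio: float = None has to be spelled Optional[float]. A minimal sketch of the pattern (the function name is hypothetical):

    from typing import Optional

    def pick_ratio(train_ratio: Optional[float] = None, val_ratio: float = 0.1) -> float:
        # An explicit Optional makes the None default acceptable to strict mypy.
        return train_ratio if train_ratio is not None else 1.0 - val_ratio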
