diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index acbbe3e9..335b8dbc 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -31,8 +31,8 @@ jobs: # this way, failuares are treated as a warning and don’t fail the whole workflow. This is sometimes referred to # as a "shadow CI job". # pypy is currently disabled, as it takes a long time to run (>20mins) -# python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.9'] - python-version: ['3.7', '3.8', '3.9', '3.10'] +# python-version: ['3.8', '3.9', '3.10', 'pypy-3.9'] + python-version: ['3.8', '3.9', '3.10'] experimental: [false] # I'm temporarily disabling the build on the 3.11 alpha, as the installation aims at buiilding the scipy wheel, diff --git a/.readthedocs.yml b/.readthedocs.yml index c97d251a..c5c6292e 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,6 +15,6 @@ formats: # Optionally set the version of Python and requirements required to build your docs python: - version: 3.7 + version: "3.9" install: - requirements: docs/requirements.txt diff --git a/docs/installation.md b/docs/installation.md index ca9a6445..8837c31c 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,7 +2,7 @@ # Installing Tarski ## Software Requirements -Tarski is mostly developed in Python, and requires a working Python>=3.6 installation. +Tarski is mostly developed in Python, and requires a working Python>=3.8 installation. We strongly recommend installing Tarski within a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html). 
The installation instructions below will install for you any additional diff --git a/setup.py b/setup.py index 5c5137dd..c4867e8f 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,6 @@ def main(): "License :: OSI Approved :: Apache Software License", 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', @@ -50,14 +49,12 @@ def main(): packages=find_packages('src'), # include all packages under src package_dir={'': 'src'}, # tell distutils packages are under src - python_requires='>=3.6', # supported Python ranges + python_requires='>=3.8', # supported Python ranges install_requires=[ # psutil not supported on Windows, we haven't tested in other platforms, but since it's not essential # to the functioning of Tarski, better be conservative here and install only on Linux. 'psutil; platform_system=="Linux"', - 'multipledispatch', - # Antlr pinned to a specific version to avoid messages "ANTLR runtime and generated code versions disagree" # messages. If we want to bump this up, we'll need to regenerate the grammar files with the new version. 'antlr4-python3-runtime==4.7.2', diff --git a/src/tarski/__init__.py b/src/tarski/__init__.py index f5947707..6e2680d6 100644 --- a/src/tarski/__init__.py +++ b/src/tarski/__init__.py @@ -1,14 +1,19 @@ - import logging -from .version import __version__, __version_info__ +import sys + +from . import fstrips +from .errors import LanguageError from .fol import FirstOrderLanguage -from .theories import language +from .syntax import Constant, Formula, Function, Predicate, Term, Variable from .theories import Theory as Theories -from .syntax import Function, Predicate, Formula, Term, Constant, Variable -from .errors import LanguageError -from . 
import fstrips +from .theories import language +from .version import __version__, __version_info__ logging.getLogger(__name__).addHandler(logging.NullHandler()) + +if sys.version_info < (3, 8, 0): + raise OSError(f'Tarski requires Python>=3.8, but yours is {sys.version_info}') + __all__ = ['__version__', '__version_info__', 'FirstOrderLanguage', 'language', 'Theories', 'Function', 'Predicate', 'Formula', 'Term', 'Constant', 'Variable', 'LanguageError', 'fstrips'] diff --git a/src/tarski/analysis/csp.py b/src/tarski/analysis/csp.py index 7b9af04a..6c7677e7 100644 --- a/src/tarski/analysis/csp.py +++ b/src/tarski/analysis/csp.py @@ -5,7 +5,7 @@ from collections import defaultdict from ..errors import TarskiError -from ..syntax import CompoundFormula, Atom, Connective, Variable, Constant +from ..syntax import Atom, CompoundFormula, Connective, Constant, Variable class WrongFormalismError(TarskiError): @@ -77,7 +77,7 @@ def check_hypergraph_acyclicity(hypergraph): Abiteboul, S., Hull, R. and Vianu, V (1995). Foundations of Databases, pp.131-132. 
""" nodes = set(itertools.chain.from_iterable(hypergraph)) - edges = set(frozenset(x) for x in hypergraph) # simply convert the tuple into frozensets + edges = {frozenset(x) for x in hypergraph} # simply convert the tuple into frozensets if len(edges) <= 1 or len(nodes) <= 1: return True diff --git a/src/tarski/analysis/csp_schema.py b/src/tarski/analysis/csp_schema.py index 2dedeeaa..ecb565b2 100644 --- a/src/tarski/analysis/csp_schema.py +++ b/src/tarski/analysis/csp_schema.py @@ -2,13 +2,15 @@ from enum import Enum from pathlib import Path -from ..fstrips.manipulation import Simplify from ..errors import TarskiError -from ..fstrips.representation import is_conjunction_of_literals, has_state_variable_shape, \ - collect_effect_free_parameters +from ..fstrips.manipulation import Simplify +from ..fstrips.representation import (collect_effect_free_parameters, + has_state_variable_shape, + is_conjunction_of_literals) from ..grounding.common import StateVariableLite -from ..syntax import QuantifiedFormula, Quantifier, Contradiction, CompoundFormula, Atom, CompoundTerm, \ - is_neg, symref, Constant, Variable, Tautology, top +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Constant, + Contradiction, QuantifiedFormula, Quantifier, Tautology, + Variable, is_neg, symref, top) from ..syntax.ops import collect_unique_nodes, flatten from ..syntax.transform import to_prenex_negation_normal_form diff --git a/src/tarski/benchmarks/blocksworld.py b/src/tarski/benchmarks/blocksworld.py index a7e1ad93..0e61f155 100644 --- a/src/tarski/benchmarks/blocksworld.py +++ b/src/tarski/benchmarks/blocksworld.py @@ -5,12 +5,12 @@ import tarski as tsk from tarski import fstrips as fs -from tarski.fstrips import DelEffect, AddEffect +from tarski.fstrips import AddEffect, DelEffect + from ..fstrips import create_fstrips_problem from ..syntax import land from ..theories import Theory - BASE_DOMAIN_NAME = "blocksworld" @@ -24,7 +24,7 @@ def generate_strips_bw_language(nblocks=4): 
lang.predicate('on', object_t, object_t) _ = [lang.predicate(p, object_t) for p in "ontable clear holding".split()] - _ = [lang.constant('b{}'.format(k), object_t) for k in range(1, nblocks + 1)] + _ = [lang.constant(f'b{k}', object_t) for k in range(1, nblocks + 1)] return lang @@ -41,7 +41,7 @@ def generate_fstrips_bw_language(nblocks=4): # Table and blocks lang.constant('table', place) - _ = [lang.constant('b{}'.format(k), block) for k in range(1, nblocks + 1)] + _ = [lang.constant(f'b{k}', block) for k in range(1, nblocks + 1)] return lang diff --git a/src/tarski/benchmarks/counters.py b/src/tarski/benchmarks/counters.py index bdb4ca49..706e0d33 100644 --- a/src/tarski/benchmarks/counters.py +++ b/src/tarski/benchmarks/counters.py @@ -5,7 +5,6 @@ from ..syntax import land from ..theories import Theory - BASE_DOMAIN_NAME = "counters-fn" diff --git a/src/tarski/dl/__init__.py b/src/tarski/dl/__init__.py index 7fee141c..dc52934a 100644 --- a/src/tarski/dl/__init__.py +++ b/src/tarski/dl/__init__.py @@ -1,8 +1,44 @@ - -from .concepts import Concept, PrimitiveConcept, UniversalConcept, NotConcept, ExistsConcept, ForallConcept, \ - EqualConcept, AndConcept, OrConcept, EmptyConcept, NominalConcept, NullaryAtom, GoalConcept, GoalNullaryAtom -from .concepts import Role, PrimitiveRole, InverseRole, StarRole, RestrictRole, CompositionRole, GoalRole -from .features import MinDistanceFeature, ConceptCardinalityFeature, EmpiricalBinaryConcept, FeatureValueChange,\ - NullaryAtomFeature, Feature -from .factory import SyntacticFactory, compute_dl_vocabulary +from .concepts import (AndConcept, CompositionRole, Concept, EmptyConcept, + EqualConcept, ExistsConcept, ForallConcept, GoalConcept, + GoalNullaryAtom, GoalRole, InverseRole, NominalConcept, + NotConcept, NullaryAtom, OrConcept, PrimitiveConcept, + PrimitiveRole, RestrictRole, Role, StarRole, + UniversalConcept) from .errors import ArityDLMismatch +from .factory import SyntacticFactory, compute_dl_vocabulary +from 
.features import (ConceptCardinalityFeature, EmpiricalBinaryConcept, + Feature, FeatureValueChange, MinDistanceFeature, + NullaryAtomFeature) + +__all__ = [ + 'AndConcept', + 'ArityDLMismatch', + 'CompositionRole', + 'Concept', + 'ConceptCardinalityFeature', + 'EmpiricalBinaryConcept', + 'EmptyConcept', + 'EqualConcept', + 'ExistsConcept', + 'Feature', + 'FeatureValueChange', + 'ForallConcept', + 'GoalConcept', + 'GoalNullaryAtom', + 'GoalRole', + 'InverseRole', + 'MinDistanceFeature', + 'NominalConcept', + 'NotConcept', + 'NullaryAtom', + 'NullaryAtomFeature', + 'OrConcept', + 'PrimitiveConcept', + 'PrimitiveRole', + 'RestrictRole', + 'Role', + 'StarRole', + 'SyntacticFactory', + 'UniversalConcept', + 'compute_dl_vocabulary', +] diff --git a/src/tarski/dl/concepts.py b/src/tarski/dl/concepts.py index 7ad4f1da..e803c5e9 100644 --- a/src/tarski/dl/concepts.py +++ b/src/tarski/dl/concepts.py @@ -1,7 +1,7 @@ """ """ -from ..syntax import Predicate, Function, Sort +from ..syntax import Function, Predicate, Sort from ..utils.algorithms import transitive_closure from ..utils.hashing import consistent_hash from .errors import ArityDLMismatch @@ -25,7 +25,7 @@ def __eq__(self, other): and self.name == other.name) def __repr__(self): - return "{}".format(self.name) + return f"{self.name}" __str__ = __repr__ @@ -35,7 +35,7 @@ def denotation(self, model): class GoalNullaryAtom(NullaryAtom): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -133,7 +133,7 @@ def denotation(self, model): return model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return "Nominal({})".format(self.name) + return f"Nominal({self.name})" __str__ = __repr__ @@ -166,7 +166,7 @@ def denotation(self, model): return model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return "{}".format(self.name) + return f"{self.name}" __str__ = __repr__ @@ -176,7 +176,7 @@ def flatten(self): class 
GoalConcept(PrimitiveConcept): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -199,7 +199,7 @@ def denotation(self, model): return ~model.compressed_denotation(self.c) def __repr__(self): - return 'Not({})'.format(self.c) + return f'Not({self.c})' __str__ = __repr__ @@ -230,7 +230,7 @@ def denotation(self, model): return ext_c1 & ext_c2 def __repr__(self): - return 'And({},{})'.format(self.c1, self.c2) + return f'And({self.c1},{self.c2})' __str__ = __repr__ @@ -261,7 +261,7 @@ def denotation(self, model): return ext_c1 | ext_c2 def __repr__(self): - return 'Or({},{})'.format(self.c1, self.c2) + return f'Or({self.c1},{self.c2})' __str__ = __repr__ @@ -291,11 +291,11 @@ def denotation(self, model): ext_c = model.uncompressed_denotation(self.c) ext_r = model.uncompressed_denotation(self.r) # result = [x for x in objects if [z for (y, z) in ext_r if y == x and z in ext_c]] - result = set(x for x, y in ext_r if y in ext_c) + result = {x for x, y in ext_r if y in ext_c} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Exists({},{})'.format(self.r, self.c) + return f'Exists({self.r},{self.c})' __str__ = __repr__ @@ -332,7 +332,7 @@ def denotation(self, model): return model.compressed(result, self.ARITY) def __repr__(self): - return 'Forall({},{})'.format(self.r, self.c) + return f'Forall({self.r},{self.c})' __str__ = __repr__ @@ -363,14 +363,14 @@ def denotation(self, model): ext_r2 = model.uncompressed_denotation(self.r2) result = set() for x in universe: - left = set(z for (y, z) in ext_r1 if y == x) - right = set(z for (y, z) in ext_r2 if y == x) + left = {z for (y, z) in ext_r1 if y == x} + right = {z for (y, z) in ext_r2 if y == x} if left == right: result.add(x) return model.compressed(result, self.ARITY) def __repr__(self): - return 'Equal({},{})'.format(self.r1, self.r2) + return f'Equal({self.r1},{self.r2})' __str__ = __repr__ @@ -402,7 +402,7 @@ def denotation(self, model): return 
model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return '{}'.format(self.name) + return f'{self.name}' __str__ = __repr__ @@ -412,7 +412,7 @@ def flatten(self): class GoalRole(PrimitiveRole): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -434,11 +434,11 @@ def __eq__(self, other): def denotation(self, model): ext_r = model.uncompressed_denotation(self.r) - result = set((y, x) for (x, y) in ext_r) + result = {(y, x) for (x, y) in ext_r} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Inverse({})'.format(self.r) + return f'Inverse({self.r})' __str__ = __repr__ @@ -466,7 +466,7 @@ def denotation(self, model): return model.compressed(result, self.ARITY) def __repr__(self): - return 'Star({})'.format(self.r) + return f'Star({self.r})' __str__ = __repr__ @@ -505,7 +505,7 @@ def denotation(self, model): return model.compressed(result, self.ARITY) def __repr__(self): - return 'Composition({},{})'.format(self.r1, self.r2) + return f'Composition({self.r1},{self.r2})' __str__ = __repr__ @@ -533,11 +533,11 @@ def __eq__(self, other): def denotation(self, model): ext_c = model.uncompressed_denotation(self.c) ext_r = model.uncompressed_denotation(self.r) - result = set((x, y) for (x, y) in ext_r if y in ext_c) + result = {(x, y) for (x, y) in ext_r if y in ext_c} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Restrict({},{})'.format(self.r, self.c) + return f'Restrict({self.r},{self.c})' __str__ = __repr__ @@ -547,4 +547,4 @@ def flatten(self): def _check_arity(term, expected_arity, predfun): if expected_arity != predfun.uniform_arity(): - raise ArityDLMismatch('Cannot create {} from predicate "{}"'.format(term, predfun)) + raise ArityDLMismatch(f'Cannot create {term} from predicate "{predfun}"') diff --git a/src/tarski/dl/errors.py b/src/tarski/dl/errors.py index 7e6ed8e9..2c3c93bd 100644 --- a/src/tarski/dl/errors.py +++ 
b/src/tarski/dl/errors.py @@ -1,4 +1,3 @@ - from ..errors import TarskiError diff --git a/src/tarski/dl/factory.py b/src/tarski/dl/factory.py index becbcae9..0a91935d 100644 --- a/src/tarski/dl/factory.py +++ b/src/tarski/dl/factory.py @@ -1,11 +1,12 @@ - import logging from .. import FirstOrderLanguage from ..syntax import builtins -from . import Concept, Role, UniversalConcept, PrimitiveConcept, NotConcept, ExistsConcept, ForallConcept, \ - EqualConcept, PrimitiveRole, RestrictRole, AndConcept, EmptyConcept, CompositionRole, NominalConcept, NullaryAtom, \ - GoalNullaryAtom, GoalConcept, GoalRole, OrConcept +from . import (AndConcept, CompositionRole, Concept, EmptyConcept, + EqualConcept, ExistsConcept, ForallConcept, GoalConcept, + GoalNullaryAtom, GoalRole, NominalConcept, NotConcept, + NullaryAtom, OrConcept, PrimitiveConcept, PrimitiveRole, + RestrictRole, Role, UniversalConcept) def filter_subnodes(elem, t): @@ -57,7 +58,7 @@ def generate_primitives_from_language(self, nominals, types, goal_predicates): roles.append(GoalRole(predfun)) else: - logging.warning('Predicate/Function "{}" with normalized arity > 2 ignored'.format(predfun)) + logging.warning(f'Predicate/Function "{predfun}" with normalized arity > 2 ignored') for c in nominals: concepts.append(NominalConcept(c.symbol, c.sort)) @@ -78,12 +79,12 @@ def create_exists_concept(self, role: Role, concept: Concept): _, s2 = role.sort if concept == self.bot: - logging.debug('Concept "{}" is statically empty'.format(result)) + logging.debug(f'Concept "{result}" is statically empty') return None # TODO ADD: If C is a sort-concept of the same sort than s2, then the concept will be equiv to exist(R.True) if not self.language.are_vertically_related(s2, concept.sort): - logging.debug('Concept "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Concept "{result}" pruned for type-inconsistency reasons') return None if isinstance(role, RestrictRole) and concept == self.top: @@ -109,7 +110,7 
@@ def create_forall_concept(self, role: Role, concept: Concept): return None if not self.language.are_vertically_related(s2, concept.sort): - logging.debug('Concept "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Concept "{result}" pruned for type-inconsistency reasons') return None return result @@ -123,12 +124,12 @@ def create_and_concept(self, c1: Concept, c2: Concept): return None # No sense in C and C if c1 in (self.top, self.bot) or c2 in (self.top, self.bot): - logging.debug('AND of {} and {} pruned, no sense in AND\'ing with top or bot'.format(c1, c2)) + logging.debug(f'AND of {c1} and {c2} pruned, no sense in AND\'ing with top or bot') return None if sort is None: # i.e. c1 and c2 are disjoint types - logging.debug('AND of {} and {} pruned for type-inconsistency reasons'.format(c1, c2)) + logging.debug(f'AND of {c1} and {c2} pruned for type-inconsistency reasons') return None return AndConcept(c1, c2, sort) @@ -140,7 +141,7 @@ def create_or_concept(self, c1: Concept, c2: Concept): return None # No sense in C OR C if c1 in (self.top, self.bot) or c2 in (self.top, self.bot): - logging.debug('OR of {} and {} pruned, no sense in OR\'ing with top or bot'.format(c1, c2)) + logging.debug(f'OR of {c1} and {c2} pruned, no sense in OR\'ing with top or bot') return None return OrConcept(c1, c2, sort) @@ -151,7 +152,7 @@ def create_equal_concept(self, r1: Role, r2: Role): sort = self.language.most_restricted_type(r1.sort[0], r2.sort[0]) if sort is None: - logging.debug('Concept "EqualConcept({},{})" pruned for type-inconsistency reasons'.format(r1, r2)) + logging.debug(f'Concept "EqualConcept({r1},{r2})" pruned for type-inconsistency reasons') return None return EqualConcept(r1, r2, sort) @@ -159,15 +160,15 @@ def create_restrict_role(self, r: Role, c: Concept): result = RestrictRole(r, c) if not self.language.are_vertically_related(r.sort[1], c.sort): - logging.debug('Role "{}" pruned for type-inconsistency reasons'.format(result)) + 
logging.debug(f'Role "{result}" pruned for type-inconsistency reasons') return None if isinstance(c, UniversalConcept) or c == self.bot: - logging.debug('Role "{}" pruned; no sense in restricting to top / bot concepts'.format(result)) + logging.debug(f'Role "{result}" pruned; no sense in restricting to top / bot concepts') return None if isinstance(r, RestrictRole): - logging.debug('Role "{}" pruned; no direct nesting of restrictions'.format(result)) + logging.debug(f'Role "{result}" pruned; no direct nesting of restrictions') return None return result @@ -182,12 +183,12 @@ def create_composition_role(self, r1: Role, r2: Role): result = CompositionRole(r1, r2) if not self.language.are_vertically_related(r1.sort[1], r2.sort[0]): - logging.debug('Role "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Role "{result}" pruned for type-inconsistency reasons') return None num_comp = len(filter_subnodes(result, CompositionRole)) if num_comp > 2: - logging.debug('Role "{}" pruned: number of compositions ({}) exceeds threshold'.format(result, num_comp)) + logging.debug(f'Role "{result}" pruned: number of compositions ({num_comp}) exceeds threshold') return None return result diff --git a/src/tarski/dl/features.py b/src/tarski/dl/features.py index 6b620c42..5f8cfe7c 100644 --- a/src/tarski/dl/features.py +++ b/src/tarski/dl/features.py @@ -3,9 +3,9 @@ """ from enum import Enum -from .concepts import Concept, Role, NullaryAtom from ..utils.algorithms import compute_min_distance from ..utils.hashing import consistent_hash +from .concepts import Concept, NullaryAtom, Role class FeatureValueChange(Enum): @@ -92,7 +92,7 @@ def diff(self, x, y): return compute_int_feature_diff(x, y) def __repr__(self): - return 'Num[{}]'.format(self.c) + return f'Num[{self.c}]' __str__ = __repr__ @@ -126,7 +126,7 @@ def diff(self, x, y): return compute_bool_feature_diff(x, y) def __repr__(self): - return 'Bool[{}]'.format(self.c) + return f'Bool[{self.c}]' __str__ = 
__repr__ @@ -214,7 +214,7 @@ def diff(self, x, y): return compute_int_feature_diff(x, y) def __repr__(self): - return 'Dist[{};{};{}]'.format(self.c1, self.r, self.c2) + return f'Dist[{self.c1};{self.r};{self.c2}]' __str__ = __repr__ @@ -269,7 +269,7 @@ def diff(self, x, y): return compute_bool_feature_diff(x, y) def __repr__(self): - return 'Atom[{}]'.format(self.atom) + return f'Atom[{self.atom}]' __str__ = __repr__ diff --git a/src/tarski/errors.py b/src/tarski/errors.py index 4e5c9746..715da88e 100644 --- a/src/tarski/errors.py +++ b/src/tarski/errors.py @@ -1,4 +1,3 @@ - class TarskiError(Exception): """ Common ancestor class to all of Tarski's exceptions """ @@ -23,24 +22,21 @@ def __init__(self, msg=None): class LanguageMismatch(SyntacticError): def __init__(self, obj, l1, l2, msg=None): - msg = msg or ('Language mismatch when operating on object {obj} of type {classname}.\n' - 'Expected language: {l2}\n' - 'Actual language: : {l1}\n') \ - .format(obj=obj, classname=type(obj).__name__, l1=l1, l2=l2) + msg = msg or (f'Language mismatch when operating on object {obj} of type {type(obj).__name__}.\n' + f'Expected language: {l2}\n' + f'Actual language: : {l1}\n') super().__init__(msg) class ArityMismatch(SyntacticError): def __init__(self, head, arguments, msg=None): - msg = msg or 'Arity mismatch applying element {} with arity {} to arguments {}'. \ - format(head, head.arity, arguments) + msg = msg or f'Arity mismatch applying element {head} with arity {head.arity} to arguments {arguments}' super().__init__(msg) class SortMismatch(SyntacticError): def __init__(self, element, type_, expected_type, msg=None): - msg = msg or 'Sort mismatch on element {}. Expected sort was "{}", element has sort "{}"'.format( - element, expected_type, type_) + msg = msg or f'Sort mismatch on element {element}. Expected sort: "{expected_type}". 
Actual sort: "{type_}"' super().__init__(msg) @@ -89,7 +85,7 @@ class DuplicateActionDefinition(DuplicateDefinition): class DuplicateVariableDefinition(DuplicateDefinition): def __init__(self, variable, other, msg=None): - msg = msg or "Variable with name '{}' already defined in binding: {}".format(variable.symbol, other) + msg = msg or f"Variable with name '{variable.symbol}' already defined in binding: {other}" super().__init__(variable, other, msg) @@ -123,25 +119,24 @@ class UndefinedVariable(UndefinedElement): class UnboundVariable(SemanticError): def __init__(self, var, msg=None): - msg = msg or 'Attempted to evaluate open formula with free variable {}'.format(var) + msg = msg or f'Attempted to evaluate open formula with free variable {var}' super().__init__(msg) class IncorrectExtensionDefinition(SemanticError): def __init__(self, element, point, value, msg=None): - msg = msg or 'Incorrect definition of extension of symbol "{}". Cannot assign value "{}" to point "{}"'.format( - element, value, point) + msg = msg or f'Incorrect extension of symbol "{element}". Cannot assign value "{value}" to point "{point}"' super().__init__(msg) class UnknownTheory(LanguageError): def __init__(self, theory): - super().__init__('Unknown first-order theory "{}"'.format(theory)) + super().__init__(f'Unknown first-order theory "{theory}"') class CommandNotFoundError(TarskiError): def __init__(self, name, msg=None): - msg = msg or 'Necessary command "{}" could not be found'.format(name) + msg = msg or f'Necessary command "{name}" could not be found' super().__init__(msg) diff --git a/src/tarski/evaluators/simple.py b/src/tarski/evaluators/simple.py index 832be930..f933335c 100644 --- a/src/tarski/evaluators/simple.py +++ b/src/tarski/evaluators/simple.py @@ -1,12 +1,14 @@ import operator from typing import List -from .. import funcsym from .. 
import errors as err -from ..syntax import ops, Connective, Atom, Formula, CompoundFormula, QuantifiedFormula, builtins, Variable, \ - Constant, CompoundTerm, Tautology, Contradiction, IfThenElse, AggregateCompoundTerm, Term -from ..syntax.algebra import Matrix +from .. import funcsym from ..model import Model +from ..syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + CompoundTerm, Connective, Constant, Contradiction, + Formula, IfThenElse, QuantifiedFormula, Tautology, Term, + Variable, builtins, ops) +from ..syntax.algebra import Matrix # TODO We will need to extend this so that the interpretation depends on a certain, given sigma of values to @@ -125,7 +127,7 @@ def symbolic_matrix_multiplication(lhs: Matrix, rhs: Matrix): C, D = rhs.shape if B != C: - raise TypeError('matrices {}x{} and {}x{} cannot be multiplied together'.format(A, B, C, D)) + raise TypeError(f'matrices {A}x{B} and {C}x{D} cannot be multiplied together') zip_b = list(zip(*rhs.matrix)) return [[sum(ele_a * ele_b for ele_a, ele_b in zip(row_a, col_b)) diff --git a/src/tarski/fol.py b/src/tarski/fol.py index 38c3f6cd..3b3e2a7b 100644 --- a/src/tarski/fol.py +++ b/src/tarski/fol.py @@ -1,14 +1,14 @@ - import copy import itertools -from collections import defaultdict, OrderedDict +from collections import OrderedDict, defaultdict from typing import Union, cast from . import errors as err +from . import modules from .errors import UndefinedElement -from .syntax import Function, Constant, Variable, Sort, inclusion_closure, Predicate, Interval +from .syntax import (Constant, Function, Interval, Predicate, Sort, Variable, + inclusion_closure) from .syntax.algebra import Matrix -from . 
import modules class FirstOrderLanguage: @@ -224,7 +224,7 @@ def _retrieve_object(self, obj, type_): # obj must be a string, which we take as the name of a language element if type_ not in self._element_containers: - raise RuntimeError("Trying to index incorrect type {}".format(type_)) + raise RuntimeError(f"Trying to index incorrect type {type_}") if obj not in self._element_containers[type_]: raise err.UndefinedElement(obj) @@ -268,8 +268,7 @@ def constants(self): @staticmethod def vector(arraylike, sort: Sort): - np = modules.import_numpy() - return Matrix(np.reshape(arraylike, (len(arraylike), 1)), sort) + return Matrix(modules.numpy.reshape(arraylike, (len(arraylike), 1)), sort) @staticmethod def matrix(arraylike, sort: Sort): @@ -339,7 +338,7 @@ def dump(self): def check_well_formed(self): for _, s in self._sorts.items(): if s.cardinality() == 0: - raise err.LanguageError("Sort '{}' is empty!".format(s)) + raise err.LanguageError(f"Sort '{s}' is empty!") def most_restricted_type(self, t1, t2): if self.is_subtype(t1, t2): @@ -362,7 +361,7 @@ def connected_in_type_hierarchy(self, t_0, t_goal): """ if t_goal in self.indirect_ancestor_sorts[t_0]: return True - OPEN = [t for t in self.ancestor_sorts[t_0]] + OPEN = list(self.ancestor_sorts[t_0]) while len(OPEN) != 0: t = OPEN.pop() if t == t_goal: diff --git a/src/tarski/fstrips/__init__.py b/src/tarski/fstrips/__init__.py index 79829711..770ce035 100644 --- a/src/tarski/fstrips/__init__.py +++ b/src/tarski/fstrips/__init__.py @@ -1,7 +1,30 @@ - -from .problem import Problem, create_fstrips_problem from .action import Action from .derived import Derived -from .fstrips import BaseEffect, SingleEffect, AddEffect, DelEffect, FunctionalEffect, IncreaseEffect, \ - LiteralEffect, UniversalEffect, ChoiceEffect, VectorisedEffect, LinearEffect, BlackBoxEffect,\ - language, OptimizationMetric, OptimizationType +from .fstrips import (AddEffect, BaseEffect, BlackBoxEffect, ChoiceEffect, + DelEffect, FunctionalEffect, 
IncreaseEffect, + LinearEffect, LiteralEffect, OptimizationMetric, + OptimizationType, SingleEffect, UniversalEffect, + VectorisedEffect, language) +from .problem import Problem, create_fstrips_problem + +__all__ = [ + 'Action', + 'AddEffect', + 'BaseEffect', + 'BlackBoxEffect', + 'ChoiceEffect', + 'DelEffect', + 'Derived', + 'FunctionalEffect', + 'IncreaseEffect', + 'LinearEffect', + 'LiteralEffect', + 'OptimizationMetric', + 'OptimizationType', + 'Problem', + 'SingleEffect', + 'UniversalEffect', + 'VectorisedEffect', + 'create_fstrips_problem', + 'language', +] diff --git a/src/tarski/fstrips/action.py b/src/tarski/fstrips/action.py index a91305cc..7e5f3ac5 100644 --- a/src/tarski/fstrips/action.py +++ b/src/tarski/fstrips/action.py @@ -19,7 +19,7 @@ def __lt__(self, other): return self.name < other.name def ident(self): - paramlist = "{}".format(','.join("{}: {}".format(p.symbol, p.sort.name) for p in self.parameters)) + paramlist = "{}".format(','.join(f"{p.symbol}: {p.sort.name}" for p in self.parameters)) return f'{self.name}({paramlist})' def __str__(self): @@ -27,8 +27,8 @@ def __str__(self): __repr__ = __str__ def print(self): - tokens = ['{}:'.format(self.ident()), - 'pre=({})'.format(self.precondition), + tokens = [f'{self.ident()}:', + f'pre=({self.precondition})', 'eff=({})'.format(' & '.join(str(eff) for eff in self.effects))] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/contingent/__init__.py b/src/tarski/fstrips/contingent/__init__.py index 427b0f2b..3b53d6e4 100644 --- a/src/tarski/fstrips/contingent/__init__.py +++ b/src/tarski/fstrips/contingent/__init__.py @@ -1,4 +1,4 @@ from .problem import ContingentProblem as Problem -from .. action import Action from .sensor import Sensor -from . 
import errors + +__all__ = ['Problem', 'Sensor'] diff --git a/src/tarski/fstrips/contingent/errors.py b/src/tarski/fstrips/contingent/errors.py index 9b6c1b5d..4337b89f 100644 --- a/src/tarski/fstrips/contingent/errors.py +++ b/src/tarski/fstrips/contingent/errors.py @@ -1,5 +1,4 @@ - -from ...errors import TarskiError, DuplicateDefinition, UndefinedElement +from ...errors import DuplicateDefinition, TarskiError, UndefinedElement class ObservationExpressivenessMismatch(TarskiError): diff --git a/src/tarski/fstrips/contingent/problem.py b/src/tarski/fstrips/contingent/problem.py index 7a1cf5f0..eacdec29 100644 --- a/src/tarski/fstrips/contingent/problem.py +++ b/src/tarski/fstrips/contingent/problem.py @@ -1,8 +1,8 @@ - from collections import OrderedDict + from ..problem import Problem -from .sensor import Sensor from . import errors as err +from .sensor import Sensor class ContingentProblem(Problem): @@ -33,6 +33,6 @@ def get_symbols(self, pv, ev, cv): pv.visit(sensor.obs) def __str__(self): - return 'FSTRIPS Contingent Problem "{}", domain "{}"'.format(self.name, self.domain_name) + return f'FSTRIPS Contingent Problem "{self.name}", domain "{self.domain_name}"' __repr__ = __str__ diff --git a/src/tarski/fstrips/contingent/sensor.py b/src/tarski/fstrips/contingent/sensor.py index 124d6d66..a82054c8 100644 --- a/src/tarski/fstrips/contingent/sensor.py +++ b/src/tarski/fstrips/contingent/sensor.py @@ -1,4 +1,4 @@ -from ...syntax import CompoundFormula, Atom, Connective +from ...syntax import Atom, CompoundFormula, Connective from . 
import errors as err @@ -25,7 +25,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -34,7 +34,7 @@ def dump(self): obs=[eff.dump() for eff in self.effects.dump()]) def __str__(self): - tokens = ['action {}:'.format(self.name), - 'C=({})'.format(self.condition), - 'L=({})'.format(str(self.obs))] + tokens = [f'action {self.name}:', + f'C=({self.condition})', + f'L=({str(self.obs)})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/derived.py b/src/tarski/fstrips/derived.py index 5b1f11c4..017882d3 100644 --- a/src/tarski/fstrips/derived.py +++ b/src/tarski/fstrips/derived.py @@ -27,10 +27,10 @@ def dump(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.predicate.symbol, params) + return f'{self.predicate.symbol}({params})' def __str__(self): tokens = ['derived {} {}:'.format(self.predicate.symbol, ' '.join(map(str, self.parameters))), - 'formula=({})'.format(self.formula)] + f'formula=({self.formula})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/errors.py b/src/tarski/fstrips/errors.py index 16d31024..6d045dc1 100644 --- a/src/tarski/fstrips/errors.py +++ b/src/tarski/fstrips/errors.py @@ -1,17 +1,16 @@ - -from ..errors import TarskiError, DuplicateDefinition, UndefinedElement +from ..errors import DuplicateDefinition, TarskiError, UndefinedElement class IncompleteProblemError(TarskiError): def __init__(self, problem, msg=None): msg = msg or 'specification is incomplete!' 
- super().__init__('Problem "{}": {}'.format(problem.name, msg)) + super().__init__(f'Problem "{problem.name}": {msg}') class InvalidEffectError(TarskiError): def __init__(self, effect, msg=None): - msg = msg or 'definition of effect "{}" is invalid!'.format(effect.tostring()) - super().__init__('{}'.format(msg)) + msg = msg or f'definition of effect "{effect.tostring()}" is invalid!' + super().__init__(f'{msg}') class DuplicateActionDefinition(DuplicateDefinition): @@ -31,5 +30,5 @@ def __init__(self, symbol, formula, msg=None): if msg is None: msg = ' ' - msg = 'definition of derived predicate "{} \\equiv {}" is invalid! {}'.format(symbol, formula, msg) + msg = f'definition of derived predicate "{symbol} \\equiv {formula}" is invalid! {msg}' super().__init__(msg) diff --git a/src/tarski/fstrips/fstrips.py b/src/tarski/fstrips/fstrips.py index 6b5acec4..6b52c7bf 100644 --- a/src/tarski/fstrips/fstrips.py +++ b/src/tarski/fstrips/fstrips.py @@ -1,9 +1,8 @@ - from enum import Enum -from typing import Union, List, Optional, Callable, Any +from typing import Any, Callable, List, Optional, Union -from ..syntax import CompoundTerm, Term, symref, top from .. 
import theories as ths +from ..syntax import CompoundTerm, Term, symref, top from .errors import InvalidEffectError @@ -18,7 +17,7 @@ def __str__(self): class SingleEffect(BaseEffect): def __str__(self): - return "({} -> {})".format(self.condition, self.tostring()) + return f"({self.condition} -> {self.tostring()})" __repr__ = __str__ @@ -33,7 +32,7 @@ def __init__(self, atom, condition=top): self.atom = atom def tostring(self): - return "ADD({})".format(self.atom) + return f"ADD({self.atom})" class DelEffect(SingleEffect): @@ -43,7 +42,7 @@ def __init__(self, atom, condition=top): self.atom = atom def tostring(self): - return "DEL({})".format(self.atom) + return f"DEL({self.atom})" class LiteralEffect(SingleEffect): @@ -52,7 +51,7 @@ def __init__(self, lit, condition=top): self.lit = lit def tostring(self): - return "LIT({})".format(self.lit) + return f"LIT({self.lit})" class FunctionalEffect(SingleEffect): @@ -156,7 +155,7 @@ def __init__(self, lhs, rhs, condition=top): self.check_well_formed() def tostring(self): - return "VectorisedEffect({} := {})".format(self.lhs, self.rhs) + return f"VectorisedEffect({self.lhs} := {self.rhs})" def check_well_formed(self): if not hasattr(self.lhs, 'shape'): @@ -195,7 +194,7 @@ def __init__(self, y, a, x, b, condition=top): self.check_well_formed() def tostring(self): - return "LinearEffect({} := {} * {} + {})".format(self.y, self.A, self.x, self.b) + return f"LinearEffect({self.y} := {self.A} * {self.x} + {self.b})" def check_well_formed(self): if not hasattr(self.y, 'shape'): diff --git a/src/tarski/fstrips/hybrid/__init__.py b/src/tarski/fstrips/hybrid/__init__.py index eb92efcd..8b822261 100644 --- a/src/tarski/fstrips/hybrid/__init__.py +++ b/src/tarski/fstrips/hybrid/__init__.py @@ -1,4 +1,5 @@ -from . problem import HybridProblem as Problem -from .. action import Action -from . reaction import Reaction -from . 
differential_constraints import DifferentialConstraint +from .differential_constraints import DifferentialConstraint +from .problem import HybridProblem as Problem +from .reaction import Reaction + +__all__ = ['Problem', 'Reaction', 'DifferentialConstraint'] diff --git a/src/tarski/fstrips/hybrid/differential_constraints.py b/src/tarski/fstrips/hybrid/differential_constraints.py index 5070e419..60bfadd4 100644 --- a/src/tarski/fstrips/hybrid/differential_constraints.py +++ b/src/tarski/fstrips/hybrid/differential_constraints.py @@ -1,4 +1,3 @@ - from ...syntax import BuiltinFunctionSymbol, CompoundTerm from . import errors as err @@ -24,7 +23,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -34,8 +33,8 @@ def dump(self): ode=self.ode.dump()) def __str__(self): - tokens = ['reaction {}:'.format(self.name), - 'cond: ({})'.format(self.condition), - 'variate: {}'.format(self.variate), - 'ode: {}'.format(self.ode)] + tokens = [f'reaction {self.name}:', + f'cond: ({self.condition})', + f'variate: {self.variate}', + f'ode: {self.ode}'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/hybrid/errors.py b/src/tarski/fstrips/hybrid/errors.py index 7376a4e1..542a02cb 100644 --- a/src/tarski/fstrips/hybrid/errors.py +++ b/src/tarski/fstrips/hybrid/errors.py @@ -1,5 +1,4 @@ - -from ... errors import DuplicateDefinition, UndefinedElement, SyntacticError +from ...errors import DuplicateDefinition, SyntacticError, UndefinedElement class DuplicateReactionDefinition(DuplicateDefinition): diff --git a/src/tarski/fstrips/hybrid/problem.py b/src/tarski/fstrips/hybrid/problem.py index 881ac3d7..88b24ab3 100644 --- a/src/tarski/fstrips/hybrid/problem.py +++ b/src/tarski/fstrips/hybrid/problem.py @@ -1,11 +1,10 @@ - from collections import OrderedDict +from .. 
import fstrips as fs from ..problem import Problem -from .reaction import Reaction -from .differential_constraints import DifferentialConstraint from . import errors as err -from .. import fstrips as fs +from .differential_constraints import DifferentialConstraint +from .reaction import Reaction class HybridProblem(Problem): @@ -58,12 +57,12 @@ def get_symbols(self, pv, ev, cv): for yk in eff.lhs[:, 0]: ev.visit(yk) else: - raise RuntimeError("Effect type '{}' cannot be analysed".format(type(eff))) + raise RuntimeError(f"Effect type '{type(eff)}' cannot be analysed") for _, dc in self.differential_constraints.items(): pv.visit(dc.condition) pv.visit(dc.variate) def __str__(self): - return 'FSTRIPS Hybrid Problem "{}", domain "{}"'.format(self.name, self.domain_name) + return f'FSTRIPS Hybrid Problem "{self.name}", domain "{self.domain_name}"' __repr__ = __str__ diff --git a/src/tarski/fstrips/hybrid/reaction.py b/src/tarski/fstrips/hybrid/reaction.py index 8dcc3121..7cbafa95 100644 --- a/src/tarski/fstrips/hybrid/reaction.py +++ b/src/tarski/fstrips/hybrid/reaction.py @@ -1,4 +1,3 @@ - class Reaction: """ A (possibly lifted) reaction """ @@ -15,7 +14,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -24,7 +23,7 @@ def dump(self): effect=[eff.dump() for eff in self.effect.dump()]) def __str__(self): - tokens = ['reaction {}:'.format(self.name), - 'cond: ({})'.format(self.condition), - 'eff: ({})'.format(self.effect)] + tokens = [f'reaction {self.name}:', + f'cond: ({self.condition})', + f'eff: ({self.effect})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/manipulation/__init__.py b/src/tarski/fstrips/manipulation/__init__.py index 4c192253..bf825092 100644 --- a/src/tarski/fstrips/manipulation/__init__.py +++ b/src/tarski/fstrips/manipulation/__init__.py @@ -1,2 +1,3 @@ - from 
.simplify import Simplify + +__all__ = ['Simplify'] diff --git a/src/tarski/fstrips/manipulation/simplify.py b/src/tarski/fstrips/manipulation/simplify.py index 64830d9a..fd066fad 100644 --- a/src/tarski/fstrips/manipulation/simplify.py +++ b/src/tarski/fstrips/manipulation/simplify.py @@ -1,19 +1,20 @@ import copy +from functools import singledispatchmethod -from multipledispatch import dispatch # type: ignore - -from ..fstrips import AddEffect, DelEffect, UniversalEffect, FunctionalEffect -from ..ops import collect_all_symbols, compute_number_potential_groundings from ...evaluators.simple import evaluate from ...grounding.ops import approximate_symbol_fluency -from ...syntax.terms import Constant, Variable, CompoundTerm -from ...syntax.formulas import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction, Connective, is_neg, \ - Quantifier, unwrap_conjunction_or_atom, is_eq_atom, land, exists +from ...syntax import symref +from ...syntax.formulas import (Atom, CompoundFormula, Connective, + Contradiction, QuantifiedFormula, Quantifier, + Tautology, exists, is_eq_atom, is_neg, land, + unwrap_conjunction_or_atom) +from ...syntax.ops import flatten +from ...syntax.terms import CompoundTerm, Constant, Variable from ...syntax.transform.substitutions import substitute_expression from ...syntax.util import get_symbols from ...syntax.walker import FOLWalker -from ...syntax.ops import flatten -from ...syntax import symref +from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect +from ..ops import collect_all_symbols, compute_number_potential_groundings def bool_to_expr(val): @@ -197,12 +198,12 @@ def simplify_existential_quantification(node, inplace=True): class ExistentialQuantificationSimplifier(FOLWalker): """ Replaces a formula of the form ∃x.φ[x] ∧ x = t by the formula φ[x/t]. 
""" - @dispatch(object) - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @singledispatchmethod + def visit(self, node): return self.default_handler(node) - @dispatch(QuantifiedFormula) # type: ignore - def visit(self, node: QuantifiedFormula): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: QuantifiedFormula): if node.quantifier == Quantifier.Forall: return node diff --git a/src/tarski/fstrips/manipulation/types.py b/src/tarski/fstrips/manipulation/types.py index f645778f..839c031c 100644 --- a/src/tarski/fstrips/manipulation/types.py +++ b/src/tarski/fstrips/manipulation/types.py @@ -1,4 +1,3 @@ - # import copy # TODO - Work in Progress diff --git a/src/tarski/fstrips/ops.py b/src/tarski/fstrips/ops.py index 89588364..e251e120 100644 --- a/src/tarski/fstrips/ops.py +++ b/src/tarski/fstrips/ops.py @@ -1,17 +1,12 @@ import operator -from functools import reduce +from functools import reduce, singledispatchmethod from typing import Set, Union -# At the moment we're using the "multipledispatch" package to implement single-argument dispatching. -# Whenever we move to support Python 3.8+, we could directly use -# https://docs.python.org/3/library/functools.html#functools.singledispatchmethod -from multipledispatch import dispatch # type: ignore - -from .walker import ProblemWalker -from ..syntax import Predicate, Function, CompoundTerm, Atom -from .problem import Problem +from ..syntax import Atom, CompoundTerm, Function, Predicate from . 
import fstrips as fs from .derived import Derived +from .problem import Problem +from .walker import ProblemWalker def collect_all_symbols(problem: Problem, include_builtin=False) -> Set[Union[Predicate, Function]]: @@ -26,7 +21,7 @@ def collect_all_symbols(problem: Problem, include_builtin=False) -> Set[Union[Pr walker.run(problem) if include_builtin: return walker.symbols - return set(s for s in walker.symbols if not s.builtin) + return {s for s in walker.symbols if not s.builtin} def collect_affected_symbols(problem: Problem) -> Set[Union[Predicate, Function]]: @@ -45,17 +40,17 @@ def __init__(self): super().__init__() self.symbols = set() - @dispatch(object) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 + @singledispatchmethod + def visit(self, node): return self.default_handler(node) - @dispatch(CompoundTerm) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: CompoundTerm): self.symbols.add(node.symbol) return node - @dispatch(Atom) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: Atom): self.symbols.add(node.symbol) return node @@ -66,37 +61,38 @@ def __init__(self): super().__init__() self.symbols = set() - @dispatch(object) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @singledispatchmethod + def visit(self, node): + # raise NotImplementedError(f'Cannot negate node {node} with type "{type(node)}"') return self.default_handler(node) - @dispatch(fs.AddEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.AddEffect): self.symbols.add(node.atom.symbol) return node - @dispatch(fs.DelEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.DelEffect): self.symbols.add(node.atom.symbol) return node - 
@dispatch(fs.FunctionalEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.FunctionalEffect): self.symbols.add(node.lhs.symbol) return node - @dispatch(fs.ChoiceEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.ChoiceEffect): self.symbols.add(node.obj.symbol) return node - @dispatch(fs.LinearEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.LinearEffect): self.symbols.update(lhs.symbol for lhs in node.y[:, 0]) return node - @dispatch(Derived) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: Derived): self.symbols.update(node.predicate) return node diff --git a/src/tarski/fstrips/problem.py b/src/tarski/fstrips/problem.py index 321b81c9..c112d051 100644 --- a/src/tarski/fstrips/problem.py +++ b/src/tarski/fstrips/problem.py @@ -1,11 +1,10 @@ - from collections import OrderedDict from .. import model +from . import errors as err +from . import fstrips as fs from .action import Action from .derived import Derived -from . import fstrips as fs -from . 
import errors as err class Problem: @@ -105,7 +104,8 @@ def create_fstrips_problem(language, problem_name=None, domain_name=None, evalua problem.language = language if evaluator is None: - from tarski.evaluators.simple import evaluate as evaluator # pylint: disable=import-outside-toplevel + from tarski.evaluators.simple import \ + evaluate as evaluator # pylint: disable=import-outside-toplevel problem.init = model.create(language, evaluator) return problem diff --git a/src/tarski/fstrips/representation.py b/src/tarski/fstrips/representation.py index 0f9eead5..3ab03031 100644 --- a/src/tarski/fstrips/representation.py +++ b/src/tarski/fstrips/representation.py @@ -1,19 +1,23 @@ import copy -from typing import Set, Union, Tuple, Optional, List +from typing import List, Optional, Set, Tuple, Union from ..errors import TarskiError -from .problem import Problem -from . import fstrips as fs -from ..syntax import Formula, CompoundTerm, Atom, CompoundFormula, QuantifiedFormula, is_and, is_neg, exists, symref,\ - VariableBinding, Constant, Tautology, land, Term -from ..syntax.ops import collect_unique_nodes, flatten, free_variables, all_variables +from ..fstrips import (AddEffect, BaseEffect, DelEffect, FunctionalEffect, + LiteralEffect, SingleEffect, UniversalEffect) +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Constant, Formula, + QuantifiedFormula, Tautology, Term, VariableBinding, + exists, is_and, is_neg, land, symref) +from ..syntax.ops import (all_variables, collect_unique_nodes, flatten, + free_variables) from ..syntax.sorts import compute_signature_bindings from ..syntax.symrefs import TermReference from ..syntax.transform.substitutions import enumerate_substitutions -from ..syntax.transform.substitutions import substitute_expression as fol_substitute_expression +from ..syntax.transform.substitutions import \ + substitute_expression as fol_substitute_expression from ..syntax.util import get_symbols -from ..fstrips import AddEffect, DelEffect, 
LiteralEffect, FunctionalEffect, UniversalEffect, BaseEffect, SingleEffect +from . import fstrips as fs from .action import Action +from .problem import Problem class RepresentationError(TarskiError): @@ -368,7 +372,7 @@ def identify_cost_related_functions(problem: Problem) -> Set[str]: for effect in action.effects: mark_cost_unrelated_functions_in_effect(effect, related_to_non_cost_effects) - return set(f.name for f in functions if f.name not in related_to_non_cost_effects) + return {f.name for f in functions if f.name not in related_to_non_cost_effects} def mark_cost_unrelated_functions_in_effect(effect, functions): diff --git a/src/tarski/fstrips/visitors.py b/src/tarski/fstrips/visitors.py index 2b946219..809b1137 100644 --- a/src/tarski/fstrips/visitors.py +++ b/src/tarski/fstrips/visitors.py @@ -4,11 +4,11 @@ """ from typing import Set +from ..syntax import symref +from ..syntax.formulas import Atom, CompoundFormula, QuantifiedFormula from ..syntax.symrefs import TermReference from ..syntax.temporal import ltl -from ..syntax.formulas import CompoundFormula, Atom, QuantifiedFormula from ..syntax.terms import CompoundTerm -from ..syntax import symref class FluentHeuristic: diff --git a/src/tarski/fstrips/walker.py b/src/tarski/fstrips/walker.py index 67f86b07..a40f9640 100644 --- a/src/tarski/fstrips/walker.py +++ b/src/tarski/fstrips/walker.py @@ -41,16 +41,10 @@ def __str__(self): class ProblemWalker: - """ This is an experimental implementation of a visitor pattern based on single-dispatch. - At the moment we're using the "multipledispatch" package to implement single-argument dispatching. 
- It's far from perfect; it requires that the subclass declares the following "default" method: - - >>> @dispatch(object) - >>> def visit(self, node): # pylint: disable-msg=E0102 - >>> return self.default_handler(node) - - Whenever we move to support Python 3.8+, we could directly use: - https://docs.python.org/3/library/functools.html#functools.singledispatchmethod + """ + This is an experimental implementation of a visitor pattern based on single-dispatch. + To use it, you need to subclass it and "overload" the `visit` function using the + `functools.singledispatchmethod` decorator, as it is done, for instance, in the class AllSymbolWalker. """ def __init__(self, raise_on_undefined=False): self.default_handler = self._raise if raise_on_undefined else self._donothing @@ -67,8 +61,11 @@ def _donothing(self, node): def run(self, expression, inplace=True): # Avoiding circular references: - from . import Action, BaseEffect, Problem # pylint: disable=import-outside-toplevel - from ..syntax import Formula, Term # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Formula, Term) + from . import Action # pylint: disable=import-outside-toplevel + from . import BaseEffect, Problem + # Simply dispatch according to type expression = expression if inplace else copy.deepcopy(expression) if isinstance(expression, (Formula, Term)): @@ -101,7 +98,8 @@ def visit_action(self, node, inplace=False): return node def visit_effect(self, effect, inplace=True): - from . import AddEffect, DelEffect, UniversalEffect, FunctionalEffect # pylint: disable=import-outside-toplevel + from . import AddEffect # pylint: disable=import-outside-toplevel + from . 
import DelEffect, FunctionalEffect, UniversalEffect effect = effect if inplace else copy.deepcopy(effect) if isinstance(effect, (AddEffect, DelEffect)): @@ -122,8 +120,9 @@ def visit_effect(self, effect, inplace=True): return self.visit(effect) def visit_expression(self, node, inplace=True): - from ..syntax import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction, Constant, Variable,\ - CompoundTerm, IfThenElse # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Atom, CompoundFormula, CompoundTerm, Constant, Contradiction, + IfThenElse, QuantifiedFormula, Tautology, Variable) node = node if inplace else copy.deepcopy(node) if isinstance(node, (Variable, Constant, Contradiction, Tautology)): diff --git a/src/tarski/funcsym/__init__.py b/src/tarski/funcsym/__init__.py index 289d4bec..e6206c05 100644 --- a/src/tarski/funcsym/__init__.py +++ b/src/tarski/funcsym/__init__.py @@ -1,4 +1,3 @@ - from .. 
import modules @@ -6,13 +5,12 @@ def impl(symbol): """ """ if symbol in {"erf", "erfc"}: - sci = modules.import_scipy_special() return { - "erf": sci.erf, - "erfc": sci.erfc, + "erf": modules.scipy_special.erf, # lazily import scipy.special + "erfc": modules.scipy_special.erfc, }.get(symbol) - np = modules.import_numpy() + np = modules.numpy # lazily import numpy return { "min": lambda x, y: np.min((x, y)), "max": lambda x, y: np.max((x, y)), diff --git a/src/tarski/grounding/__init__.py b/src/tarski/grounding/__init__.py index c0726ccb..f277e8c6 100644 --- a/src/tarski/grounding/__init__.py +++ b/src/tarski/grounding/__init__.py @@ -1,4 +1,10 @@ - -from .naive_grounding import ProblemGrounding, create_all_possible_state_variables, \ - NaiveGroundingStrategy from .lp_grounding import LPGroundingStrategy +from .naive_grounding import (NaiveGroundingStrategy, ProblemGrounding, + create_all_possible_state_variables) + +__all__ = [ + 'create_all_possible_state_variables', + 'NaiveGroundingStrategy', + 'LPGroundingStrategy', + 'ProblemGrounding' +] diff --git a/src/tarski/grounding/common.py b/src/tarski/grounding/common.py index ffbd50ce..77234d7a 100644 --- a/src/tarski/grounding/common.py +++ b/src/tarski/grounding/common.py @@ -1,5 +1,6 @@ from ..errors import TarskiError -from ..syntax import Predicate, Function, Constant, termlists_are_equal, termlist_hash +from ..syntax import (Constant, Function, Predicate, termlist_hash, + termlists_are_equal) class StateVariableLite: diff --git a/src/tarski/grounding/errors.py b/src/tarski/grounding/errors.py index af44e3ce..d9daf834 100644 --- a/src/tarski/grounding/errors.py +++ b/src/tarski/grounding/errors.py @@ -1,11 +1,10 @@ - from ..errors import TarskiError class UnableToGroundError(TarskiError): def __init__(self, sym, msg=None): msg = msg or 'Reason unspecified' - super().__init__('Unable to ground Term/Atom "{}": {}'.format(sym, msg)) + super().__init__(f'Unable to ground Term/Atom "{sym}": {msg}') class 
ReachabilityLPUnsolvable(TarskiError): diff --git a/src/tarski/grounding/lp_grounding.py b/src/tarski/grounding/lp_grounding.py index fb602afa..d7a5804b 100644 --- a/src/tarski/grounding/lp_grounding.py +++ b/src/tarski/grounding/lp_grounding.py @@ -1,13 +1,13 @@ """ Classes and methods related to the Logic-Program based grounding strategy of planning problems. """ -from ..utils.command import silentremove from ..grounding.ops import approximate_symbol_fluency -from ..reachability import create_reachability_lp, run_clingo, parse_model +from ..reachability import create_reachability_lp, parse_model, run_clingo from ..reachability.asp import GOAL -from .errors import ReachabilityLPUnsolvable from ..util import SymbolIndex +from ..utils.command import silentremove from .common import StateVariableLite +from .errors import ReachabilityLPUnsolvable class LPGroundingStrategy: @@ -77,7 +77,7 @@ def _solve_lp(self): return self.model def __str__(self): - return 'LPGroundingStrategy["{}"]'.format(self.problem.name) + return f'LPGroundingStrategy["{self.problem.name}"]' __repr__ = __str__ @@ -89,7 +89,8 @@ def compute_action_groundings(problem, include_variable_inequalities=False): def ground_problem_schemas_into_plain_operators(problem, include_variable_inequalities=False): # pylint: disable=import-outside-toplevel - from ..syntax.transform.action_grounding import ground_schema_into_plain_operator_from_grounding + from ..syntax.transform.action_grounding import \ + ground_schema_into_plain_operator_from_grounding action_groundings = compute_action_groundings(problem, include_variable_inequalities) operators = [] for action_name, groundings in action_groundings.items(): diff --git a/src/tarski/grounding/naive/constraints.py b/src/tarski/grounding/naive/constraints.py index b3c6079b..cf0f48fe 100644 --- a/src/tarski/grounding/naive/constraints.py +++ b/src/tarski/grounding/naive/constraints.py @@ -1,10 +1,10 @@ - import itertools -from ...syntax import QuantifiedFormula, 
Quantifier, create_substitution, substitute_expression -from ...syntax.transform import NegatedBuiltinAbsorption, CNFTransformation,\ - QuantifierEliminationMode, remove_quantifiers +from ...syntax import (QuantifiedFormula, Quantifier, create_substitution, + substitute_expression) from ...syntax.ops import all_variables +from ...syntax.transform import (CNFTransformation, NegatedBuiltinAbsorption, + QuantifierEliminationMode, remove_quantifiers) from ...util import SymbolIndex from . import instantiation @@ -20,7 +20,7 @@ def __init__(self, prob, index): self.constraints_generated = 0 def __str__(self): - return 'Constraints Generated: {}'.format(self.constraints_generated) + return f'Constraints Generated: {self.constraints_generated}' def calculate_constraints(self): diff --git a/src/tarski/grounding/naive/diff_constraints.py b/src/tarski/grounding/naive/diff_constraints.py index a75fea92..8a2d4b93 100644 --- a/src/tarski/grounding/naive/diff_constraints.py +++ b/src/tarski/grounding/naive/diff_constraints.py @@ -1,4 +1,3 @@ - import itertools from ...fstrips import hybrid @@ -18,7 +17,7 @@ def __init__(self, prob, index): self.differential_constraints_generated = 0 def __str__(self): - return 'Reactions generated: {}'.format(self.differential_constraints_generated) + return f'Reactions generated: {self.differential_constraints_generated}' def calculate_constraints(self): diff --git a/src/tarski/grounding/naive/instantiation.py b/src/tarski/grounding/naive/instantiation.py index 5662f902..68969f75 100644 --- a/src/tarski/grounding/naive/instantiation.py +++ b/src/tarski/grounding/naive/instantiation.py @@ -1,4 +1,3 @@ - from ...syntax.terms import Constant, Variable from ..errors import UnableToGroundError diff --git a/src/tarski/grounding/naive/reactions.py b/src/tarski/grounding/naive/reactions.py index a2b9a7b5..6566d928 100644 --- a/src/tarski/grounding/naive/reactions.py +++ b/src/tarski/grounding/naive/reactions.py @@ -1,4 +1,3 @@ - import itertools from 
...fstrips import hybrid @@ -19,7 +18,7 @@ def __init__(self, prob, index): self.reactions_generated = 0 def __str__(self): - return 'Reactions generated: {}'.format(self.reactions_generated) + return f'Reactions generated: {self.reactions_generated}' def calculate_reactions(self): diff --git a/src/tarski/grounding/naive/sensors.py b/src/tarski/grounding/naive/sensors.py index 27f9a28b..f9d33204 100644 --- a/src/tarski/grounding/naive/sensors.py +++ b/src/tarski/grounding/naive/sensors.py @@ -1,4 +1,3 @@ - import itertools from ...fstrips.contingent import Sensor @@ -18,7 +17,7 @@ def __init__(self, prob, index): self.sensors_generated = 0 def __str__(self): - return 'Sensors generated: {}'.format(self.sensors_generated) + return f'Sensors generated: {self.sensors_generated}' def calculate_sensors(self): # @TODO: this is pretty much the same code as that of grounding actions diff --git a/src/tarski/grounding/naive_grounding.py b/src/tarski/grounding/naive_grounding.py index 07e12e45..37f4a01b 100644 --- a/src/tarski/grounding/naive_grounding.py +++ b/src/tarski/grounding/naive_grounding.py @@ -4,14 +4,15 @@ import itertools from typing import List -from ..grounding.ops import approximate_symbol_fluency -from ..syntax import Constant, Variable, CompoundTerm, Atom, create_substitution, termlists_are_equal, termlist_hash from ..errors import DuplicateDefinition -from .errors import UnableToGroundError -from .common import StateVariableLite +from ..fstrips.visitors import FluentHeuristic, FluentSymbolCollector +from ..grounding.ops import approximate_symbol_fluency +from ..syntax import (Atom, CompoundTerm, Constant, Variable, + create_substitution, termlist_hash, termlists_are_equal) from ..syntax.transform.substitutions import substitute_expression from ..util import SymbolIndex -from ..fstrips.visitors import FluentSymbolCollector, FluentHeuristic +from .common import StateVariableLite +from .errors import UnableToGroundError class ProblemGrounding: @@ -138,7 +139,7 
@@ def ground_actions(self): return groundings def __str__(self): - return 'NaiveGroundingStrategy["{}"]'.format(self.problem.name) + return f'NaiveGroundingStrategy["{self.problem.name}"]' __repr__ = __str__ diff --git a/src/tarski/grounding/ops.py b/src/tarski/grounding/ops.py index add93bbd..4b9bf9c6 100644 --- a/src/tarski/grounding/ops.py +++ b/src/tarski/grounding/ops.py @@ -1,6 +1,5 @@ - -from ..syntax.util import get_symbols from ..fstrips.ops import collect_affected_symbols +from ..syntax.util import get_symbols def approximate_symbol_fluency(problem, include_builtin=False): diff --git a/src/tarski/io/__init__.py b/src/tarski/io/__init__.py index 33225f0c..f900bcab 100644 --- a/src/tarski/io/__init__.py +++ b/src/tarski/io/__init__.py @@ -1,6 +1,7 @@ - from .fstrips import FstripsReader, FstripsWriter from .utils import find_domain_filename # Just a shortcut, turns out they're both the same! :-) PDDLReader = FstripsReader + +__all__ = ['FstripsReader', 'FstripsWriter', 'PDDLReader', 'find_domain_filename'] diff --git a/src/tarski/io/_fstrips/common.py b/src/tarski/io/_fstrips/common.py index ab28ec0c..5cfec3fd 100644 --- a/src/tarski/io/_fstrips/common.py +++ b/src/tarski/io/_fstrips/common.py @@ -1,9 +1,9 @@ +from ... import theories from ...errors import TarskiError from ...fstrips import FunctionalEffect from ...fstrips.action import AdditiveActionCost, generate_zero_action_cost from ...fstrips.representation import is_typed_problem -from ...syntax import Interval, CompoundTerm, Tautology, BuiltinFunctionSymbol -from ... 
import theories +from ...syntax import BuiltinFunctionSymbol, CompoundTerm, Interval, Tautology from ...syntax.util import get_symbols from ...theories import Theory diff --git a/src/tarski/io/_fstrips/parser/lexer.py b/src/tarski/io/_fstrips/parser/lexer.py index 500531ae..65eadf2e 100644 --- a/src/tarski/io/_fstrips/parser/lexer.py +++ b/src/tarski/io/_fstrips/parser/lexer.py @@ -1,8 +1,9 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 -from antlr4 import * +import sys from io import StringIO from typing.io import TextIO -import sys + +from antlr4 import * def serializedATN(): @@ -636,7 +637,7 @@ class fstripsLexer(Lexer): K_PRECONDITION = 90 K_EFFECT = 91 - channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ] modeNames = [ "DEFAULT_MODE" ] diff --git a/src/tarski/io/_fstrips/parser/listener.py b/src/tarski/io/_fstrips/parser/listener.py index 63dd4e21..9b87c926 100644 --- a/src/tarski/io/_fstrips/parser/listener.py +++ b/src/tarski/io/_fstrips/parser/listener.py @@ -1,5 +1,6 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 from antlr4 import * + if __name__ is not None and "." 
in __name__: from .parser import fstripsParser else: diff --git a/src/tarski/io/_fstrips/parser/parser.py b/src/tarski/io/_fstrips/parser/parser.py index f29a2cc3..c716269f 100644 --- a/src/tarski/io/_fstrips/parser/parser.py +++ b/src/tarski/io/_fstrips/parser/parser.py @@ -1,9 +1,10 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 -# encoding: utf-8 -from antlr4 import * +import sys from io import StringIO from typing.io import TextIO -import sys + +from antlr4 import * + def serializedATN(): with StringIO() as buf: @@ -1935,7 +1936,7 @@ def typename(self): self.state = 269 self._errHandler.sync(self) _la = self._input.LA(1) - if not (((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (fstripsParser.INT_T - 79)) | (1 << (fstripsParser.FLOAT_T - 79)) | (1 << (fstripsParser.OBJECT_T - 79)) | (1 << (fstripsParser.NUMBER_T - 79)) | (1 << (fstripsParser.NAME - 79)))) != 0)): + if not (((_la - 79) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (fstripsParser.INT_T - 79)) | (1 << (fstripsParser.FLOAT_T - 79)) | (1 << (fstripsParser.OBJECT_T - 79)) | (1 << (fstripsParser.NUMBER_T - 79)) | (1 << (fstripsParser.NAME - 79)))) != 0): break self.state = 271 @@ -3642,7 +3643,7 @@ def atomicTermFormula(self): self.state = 444 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or ((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): + while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or (((_la - 83) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): self.state = 441 self.term() self.state = 446 @@ -3970,7 +3971,7 @@ def functionTerm(self): self.state = 461 self._errHandler.sync(self) _la = self._input.LA(1) - while 
_la==fstripsParser.T__0 or _la==fstripsParser.T__15 or ((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): + while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or (((_la - 83) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): self.state = 458 self.term() self.state = 463 @@ -4659,7 +4660,7 @@ def builtin_binary_function(self): self.enterOuterAlt(localctx, 1) self.state = 551 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__18) | (1 << fstripsParser.T__19) | (1 << fstripsParser.T__20) | (1 << fstripsParser.T__21) | (1 << fstripsParser.T__22) | (1 << fstripsParser.T__23))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__18) | (1 << fstripsParser.T__19) | (1 << fstripsParser.T__20) | (1 << fstripsParser.T__21) | (1 << fstripsParser.T__22) | (1 << fstripsParser.T__23))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4708,7 +4709,7 @@ def builtin_unary_function(self): self.enterOuterAlt(localctx, 1) self.state = 553 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__24) | (1 << fstripsParser.T__25) | (1 << fstripsParser.T__26) | (1 << fstripsParser.T__27) | (1 << fstripsParser.T__28) | (1 << fstripsParser.T__29) | (1 << fstripsParser.T__30) | (1 << fstripsParser.T__31) | (1 << fstripsParser.T__32))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__24) | (1 << fstripsParser.T__25) | (1 << fstripsParser.T__26) | (1 << fstripsParser.T__27) | (1 << fstripsParser.T__28) | (1 << fstripsParser.T__29) | (1 << fstripsParser.T__30) | (1 << 
fstripsParser.T__31) | (1 << fstripsParser.T__32))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4757,7 +4758,7 @@ def builtin_binary_predicate(self): self.enterOuterAlt(localctx, 1) self.state = 555 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__33) | (1 << fstripsParser.T__34) | (1 << fstripsParser.T__35) | (1 << fstripsParser.T__36) | (1 << fstripsParser.T__37))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__33) | (1 << fstripsParser.T__34) | (1 << fstripsParser.T__35) | (1 << fstripsParser.T__36) | (1 << fstripsParser.T__37))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4817,7 +4818,7 @@ def assignOp(self): self.enterOuterAlt(localctx, 1) self.state = 557 _la = self._input.LA(1) - if not(((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (fstripsParser.K_INCREASE - 75)) | (1 << (fstripsParser.K_DECREASE - 75)) | (1 << (fstripsParser.K_SCALEUP - 75)) | (1 << (fstripsParser.K_SCALEDOWN - 75)))) != 0)): + if not(((_la - 75) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (fstripsParser.K_INCREASE - 75)) | (1 << (fstripsParser.K_DECREASE - 75)) | (1 << (fstripsParser.K_SCALEUP - 75)) | (1 << (fstripsParser.K_SCALEDOWN - 75)))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/src/tarski/io/_fstrips/parser/visitor.py b/src/tarski/io/_fstrips/parser/visitor.py index e61738b6..d6acf522 100644 --- a/src/tarski/io/_fstrips/parser/visitor.py +++ b/src/tarski/io/_fstrips/parser/visitor.py @@ -1,5 +1,6 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 from antlr4 import * + if __name__ is not None and "." 
in __name__: from .parser import fstripsParser else: diff --git a/src/tarski/io/_fstrips/reader.py b/src/tarski/io/_fstrips/reader.py index ac46ffb1..ca2ff482 100644 --- a/src/tarski/io/_fstrips/reader.py +++ b/src/tarski/io/_fstrips/reader.py @@ -5,19 +5,22 @@ import copy import logging -from antlr4 import FileStream, CommonTokenStream, InputStream +from antlr4 import CommonTokenStream, FileStream, InputStream from antlr4.error.ErrorListener import ErrorListener -from .common import parse_number, process_requirements, create_sort, process_cost_effects, LowerCasingStreamWrapper from ...errors import SyntacticError -from ...fstrips import DelEffect, AddEffect, FunctionalEffect, UniversalEffect, OptimizationMetric, OptimizationType -from ...syntax import CompoundFormula, Connective, neg, Tautology, implies, exists, forall, Term, Interval -from ...syntax.builtins import get_predicate_from_symbol, get_function_from_symbol +from ...fstrips import (AddEffect, DelEffect, FunctionalEffect, + OptimizationMetric, OptimizationType, UniversalEffect) +from ...syntax import (CompoundFormula, Connective, Interval, Tautology, Term, + exists, forall, implies, neg) +from ...syntax.builtins import (get_function_from_symbol, + get_predicate_from_symbol) from ...syntax.formulas import VariableBinding - -from .parser.visitor import fstripsVisitor +from .common import (LowerCasingStreamWrapper, create_sort, parse_number, + process_cost_effects, process_requirements) from .parser.lexer import fstripsLexer from .parser.parser import fstripsParser +from .parser.visitor import fstripsVisitor class FStripsParser(fstripsVisitor): @@ -159,7 +162,7 @@ def visitTypeBoundsDefinition(self, ctx): typename = ctx.NAME().getText().lower() sort = self.language.get_sort(typename) if not isinstance(sort, Interval): - raise ParsingError("Attempt at bounding symbolic non-interval sort '{}'".format(sort)) + raise ParsingError(f"Attempt at bounding symbolic non-interval sort '{sort}'") # Encode the bounds and 
set them into the sort lower = sort.encode(ctx.NUMBER(0).getText()) @@ -221,7 +224,7 @@ def visitTermNumber(self, ctx): def _recover_variable_from_context(self, name): if self.current_binding is None: - raise ParsingError("Variable '{}' used declared outside variable binding".format(name)) + raise ParsingError(f"Variable '{name}' used declared outside variable binding") return self.current_binding.get(name) @@ -459,7 +462,7 @@ def __init__(self, component, value): self.value = value def __str__(self): - return 'in {} found undeclared variable {}'.format(self.component, repr(self.value)) + return f'in {self.component} found undeclared variable {repr(self.value)}' class ParserVariableContext: diff --git a/src/tarski/io/common.py b/src/tarski/io/common.py index bb5fc050..1372925c 100644 --- a/src/tarski/io/common.py +++ b/src/tarski/io/common.py @@ -1,9 +1,8 @@ - import os _CURRENT_DIR_ = os.path.dirname(os.path.realpath(__file__)) def load_tpl(name): - with open(os.path.join(_CURRENT_DIR_, "templates", name), 'r', encoding='utf8') as file: + with open(os.path.join(_CURRENT_DIR_, "templates", name), encoding='utf8') as file: return file.read() diff --git a/src/tarski/io/fstrips.py b/src/tarski/io/fstrips.py index 5de5ed61..c2c5e482 100644 --- a/src/tarski/io/fstrips.py +++ b/src/tarski/io/fstrips.py @@ -1,24 +1,23 @@ import logging from collections import defaultdict -from typing import Optional, List +from typing import List, Optional +from ..fstrips import (AddEffect, DelEffect, FunctionalEffect, IncreaseEffect, + UniversalEffect, create_fstrips_problem, language) from ..fstrips.action import AdditiveActionCost -from ..theories import load_theory, Theory -from .common import load_tpl from ..model import ExtensionalFunctionDefinition -from ..syntax import Tautology, Contradiction, Atom, CompoundTerm, CompoundFormula, QuantifiedFormula, \ - Term, Variable, Constant, Formula, symref, BuiltinPredicateSymbol -from ..syntax.sorts import parent, Interval, ancestors - -from 
._fstrips.common import tarski_to_pddl_type, get_requirements_string, create_number_type, uniformize_costs -from ..fstrips import create_fstrips_problem, language, FunctionalEffect, AddEffect, DelEffect, IncreaseEffect,\ - UniversalEffect - -from ._fstrips.reader import FStripsParser - +from ..syntax import (Atom, BuiltinPredicateSymbol, CompoundFormula, + CompoundTerm, Constant, Contradiction, Formula, + QuantifiedFormula, Tautology, Term, Variable, symref) +from ..syntax.sorts import Interval, ancestors, parent +from ..theories import Theory, load_theory +from ._fstrips.common import (create_number_type, get_requirements_string, + tarski_to_pddl_type, uniformize_costs) # Leave the next import so that it can be imported from the outside without warnings of importing a private module # pylint: disable=unused-import -from ._fstrips.reader import ParsingError +from ._fstrips.reader import ParsingError  # noqa: F401 +from ._fstrips.reader import FStripsParser +from .common import load_tpl class FstripsReader: @@ -50,7 +49,7 @@ def read_problem(self, domain, instance): return self.problem def parse_file(self, filename, start_rule): - logging.debug('Parsing filename "{}" from grammar rule "{}"'.format(filename, start_rule)) + logging.debug(f'Parsing filename "{filename}" from grammar rule "{start_rule}"') domain_parse_tree, _ = self.parser.parse_file(filename, start_rule) self.parser.visit(domain_parse_tree) @@ -71,7 +70,7 @@ def parse_instance_string(self, instance): return self.problem def parse_string(self, string, start_rule): - logging.debug('Parsing custom string from grammar rule "{}"'.format(start_rule)) + logging.debug(f'Parsing custom string from grammar rule "{start_rule}"') parse_tree, _ = self.parser.parse_string(string, start_rule) logging.debug("Processing AST") return self.parser.visit(parse_tree) @@ -104,7 +103,7 @@ def print_objects(constants): elements = [] for sort in sorted(constants_by_sort.keys()): sobjects = " 
".join(sorted(constants_by_sort[sort])) - elements.append("{} - {}".format(sobjects, sort)) + elements.append(f"{sobjects} - {sort}") return linebreaks(elements, indentation=2, indent_first=False) @@ -126,14 +125,14 @@ def print_init(problem): continue # Ignore intensionally defined symbols fname = signature[0] for point, value in definition.data.items(): - elements.append("(= ({} {}) {})".format(fname, print_term_ref_list(point), value)) + elements.append(f"(= ({fname} {print_term_ref_list(point)}) {value})") # e.g. (clear b1) for signature, definition in problem.init.predicate_extensions.items(): assert isinstance(definition, set) predname = signature[0] for point in definition: - elements.append("({} {})".format(predname, print_term_ref_list(point))) + elements.append(f"({predname} {print_term_ref_list(point)})") return linebreaks(elements, indentation=2, indent_first=False) @@ -149,7 +148,7 @@ def print_domain_bounds(problem): if not sort.builtin and isinstance(sort, Interval): assert lang.has_sort('Integer') if lang.Integer in ancestors(sort): - bounds.append("({} - int[{}..{}])".format(sort.name, sort.lower_bound, sort.upper_bound)) + bounds.append(f"({sort.name} - int[{sort.lower_bound}..{sort.upper_bound}])") elif lang.Real in ancestors(sort): pass # TODO @@ -157,7 +156,7 @@ def print_domain_bounds(problem): return "" inner = "\n".join(indent(b, 2) for b in bounds) - return "(:bounds\n{})".format(inner) + return f"(:bounds\n{inner})" def print_problem_constraints(problem): @@ -261,7 +260,7 @@ def get_functions(self): continue # Don't declare builtin elements domain_str = build_signature_string(fun.domain) codomain_str = tarski_to_pddl_type(fun.codomain) - res.append("({} {}) - {}".format(fun.symbol, domain_str, codomain_str)) + res.append(f"({fun.symbol} {domain_str}) - {codomain_str}") return ("\n" + _TAB * 2).join(res) def get_predicates(self): @@ -270,7 +269,7 @@ def get_predicates(self): if fun.builtin: continue # Don't declare builtin elements 
domain_str = build_signature_string(fun.sort) - res.append("({} {})".format(fun.symbol, domain_str)) + res.append(f"({fun.symbol} {domain_str})") return ("\n" + _TAB * 2).join(res) def get_actions(self): @@ -322,13 +321,13 @@ def print_formula(formula, indentation=0): elif isinstance(formula, Atom): return print_atom(formula) elif isinstance(formula, CompoundFormula): - return "({} {})".format(formula.connective, print_formula_list(formula.subformulas)) + return f"({formula.connective} {print_formula_list(formula.subformulas)})" elif isinstance(formula, QuantifiedFormula): vars_ = print_variable_list(formula.variables) # e.g. (exists (?x - object) (and (= ?x 2))) - return '({} ({}) {})'.format(formula.quantifier, vars_, print_formula(formula.formula)) - raise RuntimeError("Unexpected element type: {}".format(formula)) + return f'({formula.quantifier} ({vars_}) {print_formula(formula.formula)})' + raise RuntimeError(f"Unexpected element type: {formula}") def print_effects(effects, cost=None, indentation=0): @@ -347,19 +346,19 @@ def print_unconditional_effect(eff, indentation=0): increase = isinstance(eff, IncreaseEffect) if increase: - return indent("(increase {} {})".format(print_term(eff.lhs), print_term(eff.rhs)), indentation) + return indent(f"(increase {print_term(eff.lhs)} {print_term(eff.rhs)})", indentation) elif functional: - return indent("(assign {} {})".format(print_term(eff.lhs), print_term(eff.rhs)), indentation) + return indent(f"(assign {print_term(eff.lhs)} {print_term(eff.rhs)})", indentation) elif isinstance(eff, AddEffect): - return indent("{}".format(print_atom(eff.atom)), indentation) + return indent(f"{print_atom(eff.atom)}", indentation) elif isinstance(eff, DelEffect): - return indent("(not {})".format(print_atom(eff.atom)), indentation) + return indent(f"(not {print_atom(eff.atom)})", indentation) elif isinstance(eff, UniversalEffect): effect_str = (print_effect(eff.effects[0]) if len(eff.effects) == 1 else print_effects(eff.effects)) - 
return indent("(forall ({}) {})".format(print_variable_list(eff.variables), effect_str), + return indent(f"(forall ({print_variable_list(eff.variables)}) {effect_str})", indentation) - raise RuntimeError("Unexpected element type: {}".format(eff)) + raise RuntimeError(f"Unexpected element type: {eff}") def print_effect(eff, indentation=0): @@ -367,7 +366,7 @@ def print_effect(eff, indentation=0): if conditional: return indent( - "(when {} {})".format(print_formula(eff.condition), print_unconditional_effect(eff)), + f"(when {print_formula(eff.condition)} {print_unconditional_effect(eff)})", indentation) else: return print_unconditional_effect(eff, indentation) @@ -378,10 +377,10 @@ def print_term(term): if isinstance(term, Variable): return print_variable_name(term.symbol) elif isinstance(term, CompoundTerm): - return "({} {})".format(term.symbol.symbol, print_term_list(term.subterms)) + return f"({term.symbol.symbol} {print_term_list(term.subterms)})" elif isinstance(term, Constant): - return "{}".format(term.symbol) - raise RuntimeError("Unexpected element type: {}".format(term)) + return f"{term.symbol}" + raise RuntimeError(f"Unexpected element type: {term}") def print_atom(atom: Atom): diff --git a/src/tarski/io/pddl/__init__.py b/src/tarski/io/pddl/__init__.py index fa9e6e9e..e857df86 100644 --- a/src/tarski/io/pddl/__init__.py +++ b/src/tarski/io/pddl/__init__.py @@ -13,23 +13,23 @@ class Features(Enum): """PDDL 3.1 features""" - STRIPS = 0, - TYPING = 1, - NEGATIVE_PRECONDITIONS = 2, - DISJUNCTIVE_PRECONDITIONS = 3, - EQUALITY = 4, - EXISTENTIAL_PRECONDITIONS = 5, - UNIVERSAL_PRECONDITIONS = 6, - CONDITIONAL_EFFECTS = 7, - NUMERIC_FLUENTS = 8, - DURATIVE_ACTIONS = 9, - DURATION_INEQUALITIES = 10, - CONTINUOUS_EFFECTS = 11, - DERIVED_PREDICATES = 12, - TIMED_INITIAL_LITERALS = 13, - PREFERENCES = 14, - CONSTRAINTS = 15, - ACTION_COSTS = 16, + STRIPS = 0 + TYPING = 1 + NEGATIVE_PRECONDITIONS = 2 + DISJUNCTIVE_PRECONDITIONS = 3 + EQUALITY = 4 + 
EXISTENTIAL_PRECONDITIONS = 5 + UNIVERSAL_PRECONDITIONS = 6 + CONDITIONAL_EFFECTS = 7 + NUMERIC_FLUENTS = 8 + DURATIVE_ACTIONS = 9 + DURATION_INEQUALITIES = 10 + CONTINUOUS_EFFECTS = 11 + DERIVED_PREDICATES = 12 + TIMED_INITIAL_LITERALS = 13 + PREFERENCES = 14 + CONSTRAINTS = 15 + ACTION_COSTS = 16 OBJECT_FLUENTS = 17 diff --git a/src/tarski/io/pddl/errors.py b/src/tarski/io/pddl/errors.py index 2eecbc52..5fcb5b09 100644 --- a/src/tarski/io/pddl/errors.py +++ b/src/tarski/io/pddl/errors.py @@ -15,7 +15,7 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Semantic Error: Line {}: {}".format(self.line, self.msg) + return f"Semantic Error: Line {self.line}: {self.msg}" class ParseError(Exception): @@ -26,7 +26,7 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Parse Error: Line {}: {}".format(self.line, self.msg) + return f"Parse Error: Line {self.line}: {self.msg}" class UnsupportedFeature(Exception): @@ -37,4 +37,4 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Unsupported PDDL feature: Line {}: {}".format(self.line, self.msg) + return f"Unsupported PDDL feature: Line {self.line}: {self.msg}" diff --git a/src/tarski/io/pddl/instance.py b/src/tarski/io/pddl/instance.py index 9d99afc9..69e11275 100644 --- a/src/tarski/io/pddl/instance.py +++ b/src/tarski/io/pddl/instance.py @@ -7,22 +7,21 @@ # PDDL parser # ---------------------------------------------------------------------------------------------------------------------- -from collections import namedtuple, OrderedDict -from typing import Tuple +from collections import OrderedDict, namedtuple from enum import Enum +from typing import Tuple import tarski as tsk from tarski.io.pddl.errors import UnsupportedFeature -from tarski.theories import Theory -from tarski.syntax import Variable, Sort +from tarski.syntax import Sort, Variable, symref from tarski.syntax.sorts import Interval, int_encode_fn -from tarski.syntax import symref - 
+from tarski.theories import Theory AssignmentEffectData = namedtuple('AssignmentEffectData', ['lhs', 'rhs']) EventData = namedtuple('EventData', ['pre', 'post']) ActionData = namedtuple('ActionData', ['name', 'parameters', 'pre', 'post']) -DurativeActionData = namedtuple('DurativeActionData', ['name', 'parameters', 'at_start', 'at_end', 'overall', 'duration']) +DurativeActionData = namedtuple('DurativeActionData', + ['name', 'parameters', 'at_start', 'at_end', 'overall', 'duration']) DerivedPredicateData = namedtuple('DerivedPredicateData', ['head', 'parameters', 'body']) ObjectiveData = namedtuple('ObjectiveData', ['mode', 'type', 'expr']) @@ -368,4 +367,4 @@ def process_objective_definition(self, objective_data): type=objective_data['definition']['type'], expr=objective_data['definition']['expr']) if self.debug: - print("Objective: mode: {} type: {} expr: {}".format(self.objective.mode, self.objective.type, self.objective.expr)) + print(f"Objective: mode: {self.objective.mode} type: {self.objective.type} expr: {self.objective.expr}") diff --git a/src/tarski/io/pddl/lexer.py b/src/tarski/io/pddl/lexer.py index b4d7e0c2..35c05a35 100644 --- a/src/tarski/io/pddl/lexer.py +++ b/src/tarski/io/pddl/lexer.py @@ -7,9 +7,10 @@ # PDDL tokenizer # ---------------------------------------------------------------------------------------------------------------------- -import ply.lex as lex # type: ignore import re +import ply.lex as lex # type: ignore + # helper definitions alpha = r'[A-Za-z]' num = r'[0-9]' @@ -236,7 +237,7 @@ def t_NAT(self, t): return t def t_error(self, t): - print("Illegal character: {} at line: {}".format(repr(t.value[0]), self._lexer.lineno)) + print(f"Illegal character: {repr(t.value[0])} at line: {self._lexer.lineno}") t.lexer.skip(1) def lineno(self): diff --git a/src/tarski/io/pddl/parser.py b/src/tarski/io/pddl/parser.py index 7f2ccc49..675fd9f6 100644 --- a/src/tarski/io/pddl/parser.py +++ b/src/tarski/io/pddl/parser.py @@ -11,11 +11,12 @@ from 
ply import yacc # type: ignore -from tarski.syntax import CompoundTerm, Term, land, lor, neg, QuantifiedFormula, Quantifier from tarski.io.pddl import Features, supported_features -from tarski.io.pddl.lexer import PDDLlex -from tarski.io.pddl.instance import * from tarski.io.pddl.errors import * +from tarski.io.pddl.instance import * +from tarski.io.pddl.lexer import PDDLlex +from tarski.syntax import (CompoundTerm, QuantifiedFormula, Quantifier, Term, + land, lor, neg) class PDDLparser: @@ -68,7 +69,7 @@ def parse(self, input_data): def _print_verbose(self, p_name): if self.verbose: - print('>> Parsed `{}` ...'.format(p_name)) + print(f'>> Parsed `{p_name}` ...') def p_begin(self, p): '''begin : domain @@ -122,7 +123,7 @@ def p_domain_ref(self, p): '''domain_ref : LPAREN rwDOMAIN_REF ID RPAREN''' expected_domain = p[3] if expected_domain != self.domain_name: - msg = "Domain and problem mismatch: expected domain name is '{}', provided domain is '{}'".format(expected_domain, self.domain_name) + msg = f"Domain and problem mismatch: expected domain name is '{expected_domain}', provided domain is '{self.domain_name}'" raise SemanticError(self.lexer.lineno(), msg) def p_domain_require_def(self, p): @@ -300,14 +301,14 @@ def p_constants_def(self, p): if isinstance(entry, tuple): typename, constant_list = entry if typename not in self.instance.types: - msg = "Error parsing (:constants ) section: type '{}' was not defined".format(typename) + msg = f"Error parsing (:constants ) section: type '{typename}' was not defined" raise SemanticError(self.lexer.lineno(), msg) self.instance.process_constant_definition(entry) total_constants += len(constant_list) else: - msg = "Error processing (:constants ) section: constant '{}' has no type attached".format(entry) + msg = f"Error processing (:constants ) section: constant '{entry}' has no type attached" raise SemanticError(self.lexer.lineno(), msg) if self.debug: print("Total constants defined:", total_constants) @@ -350,7 +351,7 @@ 
def normalize_typed_variable_list(self, unnorm_args): if token_type == 'type': var_type = token_value if var_type not in self.instance.types: - msg = "Error parsing list of typed variables: type '{}' is not defined".format(var_type) + msg = f"Error parsing list of typed variables: type '{var_type}' is not defined" raise SemanticError(self.lexer.lineno(), msg) for t2 in unnorm_args[last_index+1:i]: var_term, var_sort = self.instance.get_variable(t2[1], var_type) @@ -649,7 +650,7 @@ def p_GD(self, p): p[0] = lor(neg(p[3]), p[4]) elif p[2] == self.lexer.symbols.rwEXISTS: if self.debug: - print('existential quantifier, scope tokens: {} formula: {}'.format(p[3], p[4])) + print(f'existential quantifier, scope tokens: {p[3]} formula: {p[4]}') vars = p[3] phi = p[4] p[0] = QuantifiedFormula(Quantifier.Exists, [entry['term'] for entry in vars], phi) @@ -722,7 +723,7 @@ def p_term(self, p): try: func_name = self.instance.get(p[2]) except tsk.LanguageError as e: - msg = "Error parsing term in formula, function '{}' is not declared".format(p[2]) + msg = f"Error parsing term in formula, function '{p[2]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) sub_terms = p[3] p[0] = func_name(*sub_terms) @@ -732,14 +733,14 @@ def p_term(self, p): constant_ref = self.instance.get(p[1]) p[0] = constant_ref except tsk.LanguageError as e: - msg = "Error parsing term in formula, constant '{}' is not declared".format(p[1]) + msg = f"Error parsing term in formula, constant '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) elif self.lexer.is_variable(p[1]): try: var_ref = self.var_dict[p[1]] p[0] = var_ref except KeyError as e: - msg = "Error parsing term in formula, variable '{}' is not declared in the current scope".format(p[1]) + msg = f"Error parsing term in formula, variable '{p[1]}' is not declared in the current scope" raise SemanticError(self.lexer.lineno(), msg) def p_function_term(self, p): @@ -749,7 +750,7 @@ def p_function_term(self, p): 
sub_terms = p[3] p[0] = func_name(*sub_terms) except tsk.LanguageError as e: - msg = "Error parsing function term, function '{}' is not declared".format(p[2]) + msg = f"Error parsing function term, function '{p[2]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) def p_list_of_expression(self, p): @@ -797,7 +798,7 @@ def p_f_exp(self, p): p[0] = constant_ref return except tsk.LanguageError as e: - msg = "Error parsing expression, constant '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, constant '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) else: # raise error @@ -833,7 +834,7 @@ def p_f_head(self, p): func_name = self.instance.get(p[1]) p[0] = func_name() except tsk.LanguageError as e: - msg = "Error parsing expression, function '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, function '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) return @@ -842,7 +843,7 @@ def p_f_head(self, p): sub_terms = p[3] p[0] = func_name(*sub_terms) except tsk.LanguageError as e: - msg = "Error parsing expression, function '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, function '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) def p_binary_op(self, p): @@ -1118,7 +1119,7 @@ def p_simple_duration_constraint(self, p): raise UnsupportedFeature(self.lexer.lineno(), msg) variable = p[3] if variable != '?duration': - msg = "Error parsing duration of durative action: found variable '{}' rather than '?duration'".format(variable) + msg = f"Error parsing duration of durative action: found variable '{variable}' rather than '?duration'" raise ParseError(self.lexer.lineno(), msg) p[0] = p[4] @@ -1228,17 +1229,17 @@ def p_derived_def(self, p): try: head_pred = self.instance.predicates.get(symbol) except ValueError as e: - msg = "Error parsing derived predicate, head symbol '{}' is not declared".format(symbol) + msg = f"Error parsing derived 
predicate, head symbol '{symbol}' is not declared" raise SemanticError(self.lexer.lineno(), msg) for k, arg in enumerate(head_pred.domain): if self.debug: print('signature: {} provided: {}'.format(head_pred.domain[k], var_list[k]['type'])) if not head_pred.domain[k] == var_list[k]['type']: - msg = "Error parsing derived predicate, head predicate '{}' type mismatch, check definition in (:predicates ...)".format(symbol) + msg = f"Error parsing derived predicate, head predicate '{symbol}' type mismatch, check definition in (:predicates ...)" raise SemanticError(self.lexer.lineno(), msg) dpred_body = p[4] if self.debug: - print("Body: {} type: {}".format(dpred_body, type(dpred_body))) + print(f"Body: {dpred_body} type: {type(dpred_body)}") self.instance.process_derived_predicate_skeleton(head_pred, var_list, dpred_body) # clear up scope for entry in var_list: @@ -1260,11 +1261,11 @@ def p_object_declaration(self, p): if isinstance(entry, tuple): typename, constant_list = entry if typename not in self.instance.types: - msg = "Error parsing (:objects ) section: type '{}' was not defined".format(typename) + msg = f"Error parsing (:objects ) section: type '{typename}' was not defined" raise SemanticError(self.lexer.lineno(), msg) self.instance.process_constant_definition(entry) else: - msg = "Error processing (:objects ) section: constant '{}' has no type attached".format(entry) + msg = f"Error processing (:objects ) section: constant '{entry}' has no type attached" raise SemanticError(self.lexer.lineno(), msg) if self.debug: total_constants = 0 @@ -1308,7 +1309,7 @@ def p_init_el(self, p): constant_term = self.instance.get(p[4]) p[0] = (p[3], constant_term) except tsk.LanguageError as e: - msg = "Error processing initial state: object '{}' was not defined".format(p[4]) + msg = f"Error processing initial state: object '{p[4]}' was not defined" raise SemanticError(self.lexer.lineno(), msg) def p_literal_of_name(self, p): @@ -1333,7 +1334,7 @@ def 
p_atomic_formula_of_name(self, p): try: func_symbol = self.instance.get(p[2]) except tsk.LanguageError as e: - msg = "Error parsing ground atomic formula: function '{}' is not defined".format(p[2]) + msg = f"Error parsing ground atomic formula: function '{p[2]}' is not defined" raise SemanticError(self.lexer.lineno(), msg) sub_terms = p[3] p[0] = { @@ -1416,9 +1417,9 @@ def p_error(self, p): # reached End of File return if self.debug: - print('Syntax error in input! See log file: {}'.format(self.logfile)) + print(f'Syntax error in input! See log file: {self.logfile}') - print('Syntax error in input! Line: {} failed token: {} next: {}'.format(p.lineno, p, self._parser.token())) + print(f'Syntax error in input! Line: {p.lineno} failed token: {p} next: {self._parser.token()}') while True: tok = self._parser.token() diff --git a/src/tarski/io/rddl.py b/src/tarski/io/rddl.py index fa46cce9..2c713c73 100644 --- a/src/tarski/io/rddl.py +++ b/src/tarski/io/rddl.py @@ -4,18 +4,22 @@ from enum import Enum from .. 
import modules -from .common import load_tpl +from ..errors import LanguageError +from ..evaluators.simple import evaluate from ..fol import FirstOrderLanguage -from ..syntax import implies, land, lor, neg, Connective, Quantifier, CompoundTerm, Interval, Atom, IfThenElse, \ - Contradiction, Tautology, CompoundFormula, forall, ite, AggregateCompoundTerm, QuantifiedFormula, Term, Function, \ - Variable, Predicate, Constant, Formula, builtins +from ..model import Model +from ..syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + CompoundTerm, Connective, Constant, Contradiction, + Formula, Function, IfThenElse, Interval, Predicate, + QuantifiedFormula, Quantifier, Tautology, Term, Variable) from ..syntax import arithmetic as tm +from ..syntax import builtins, forall, implies, ite, land, lor, neg +from ..syntax.builtins import BuiltinFunctionSymbol as BFS +from ..syntax.builtins import BuiltinPredicateSymbol as BPS +from ..syntax.builtins import create_atom from ..syntax.temporal import ltl as tt -from ..syntax.builtins import create_atom, BuiltinPredicateSymbol as BPS, BuiltinFunctionSymbol as BFS -from ..model import Model -from ..evaluators.simple import evaluate -from ..errors import LanguageError from ..theories import Theory, language +from .common import load_tpl class TranslationError(Exception): @@ -219,9 +223,10 @@ def __init__(self, filename): @staticmethod def _load_rddl_model(filename): - with open(filename, 'r', encoding='utf8') as input_file: + with open(filename, encoding='utf8') as input_file: rddl = input_file.read() - parser = modules.import_pyrddl_parser()() + + parser = modules.RDDLParser() parser.build() # parse RDDL return parser.parse(rddl) @@ -426,12 +431,12 @@ def write_model(self, filename): reward_expr=self.get_reward(), action_precondition_list=self.get_preconditions(), state_invariant_list=self.get_state_invariants(), - domain_non_fluents='{}_non_fluents'.format(self.task.instance_name), + 
domain_non_fluents=f'{self.task.instance_name}_non_fluents', object_list=self.get_objects(), non_fluent_expr=self.get_non_fluent_init(), instance_name=self.task.instance_name, init_state_fluent_expr=self.get_state_fluent_init(), - non_fluents_ref='{}_non_fluents'.format(self.task.instance_name), + non_fluents_ref=f'{self.task.instance_name}_non_fluents', max_nondef_actions=self.get_max_nondef_actions(), horizon=self.get_horizon(), discount=self.get_discount() @@ -443,7 +448,8 @@ def get_requirements(self): return ', '.join([str(r) for r in self.task.requirements]) def get_types(self): - from ..syntax.sorts import parent # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax.sorts import \ + parent # pylint: disable=import-outside-toplevel # Avoiding circular references type_decl_list = [] for S in self.task.L.sorts: if S.builtin or S.name == 'object': @@ -451,7 +457,7 @@ def get_types(self): if isinstance(S, Interval): self.need_constraints[S.name] = S continue - type_decl_list += ['{} : {};'.format(S.name, parent(S).name)] + type_decl_list += [f'{S.name} : {parent(S).name};'] self.need_obj_decl += [S] return '\n'.join(type_decl_list) @@ -477,7 +483,7 @@ def get_signature(fl): else: assert False if len(domain) == 0: - return '{}'.format(head) + return f'{head}' return '{}({})'.format(head, ','.join(domain)) def get_pvars(self): @@ -485,30 +491,30 @@ def get_pvars(self): # state fluents for fl, v in self.task.state_fluents: rsig = self.get_signature(fl) - pvar_decl_list += ['\t{} : {{state-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{state-fluent, {self.get_type(fl)}, default = {str(v)}}};'] for fl, level in self.task.interm_fluents: rsig = self.get_signature(fl) try: self.interm_signatures.add(fl.symbol.signature) except AttributeError: self.interm_signatures.add(fl.predicate.signature) - pvar_decl_list += ['\t{} : {{interm-fluent, {}, level = {}}};'.format(rsig, 
self.get_type(fl), str(level))] + pvar_decl_list += [f'\t{rsig} : {{interm-fluent, {self.get_type(fl)}, level = {str(level)}}};'] for fl, v in self.task.action_fluents: rsig = self.get_signature(fl) - pvar_decl_list += ['\t{} : {{action-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{action-fluent, {self.get_type(fl)}, default = {str(v)}}};'] for fl, v in self.task.non_fluents: rsig = self.get_signature(fl) try: self.non_fluent_signatures.add(fl.symbol.signature) except AttributeError: self.non_fluent_signatures.add(fl.predicate.signature) - pvar_decl_list += ['\t{} : {{non-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{non-fluent, {self.get_type(fl)}, default = {str(v)}}};'] return '\n'.join(pvar_decl_list) def get_cpfs(self): cpfs_decl_list = [] for lhs, rhs in self.task.cpfs: - cpfs_decl_list += ['\t{} = {};'.format(self.get_fluent(lhs, True), self.rewrite(rhs))] + cpfs_decl_list += [f'\t{self.get_fluent(lhs, True)} = {self.rewrite(rhs)};'] return '\n'.join(cpfs_decl_list) def get_reward(self): @@ -518,14 +524,14 @@ def get_preconditions(self): act_prec_list = [] for expr, ctype in self.task.constraints: if ctype == ConstraintType.ACTION: - act_prec_list += ['\t{};'.format(self.rewrite(expr))] + act_prec_list += [f'\t{self.rewrite(expr)};'] return '\n'.join(act_prec_list) def get_state_invariants(self): state_inv_list = [] for expr, ctype in self.task.constraints: if ctype == ConstraintType.STATE: - state_inv_list += ['\t{};'.format(self.rewrite(expr))] + state_inv_list += [f'\t{self.rewrite(expr)};'] return '\n'.join(state_inv_list) def get_objects(self): @@ -537,7 +543,7 @@ def get_objects(self): # initialize for S in self.need_obj_decl: domain_str = ','.join([str(c.symbol) for c in S.domain()]) - obj_decl_blocks += ['\t{} : {{{}}};'.format(S.name, domain_str)] + obj_decl_blocks += [f'\t{S.name} : {{{domain_str}}};'] return 'objects 
{{{}}};'.format('\n'.join(obj_decl_blocks)) @@ -552,7 +558,7 @@ def get_non_fluent_init(self): term_str = signature[0] else: term_str = str(self.task.L.get(signature[0])(*subterms)) - non_fluent_init_list += ['\t{} = {};'.format(term_str, value)] + non_fluent_init_list += [f'\t{term_str} = {value};'] for signature, defs in self.task.x0.predicate_extensions.items(): if signature not in self.non_fluent_signatures: continue @@ -562,7 +568,7 @@ def get_non_fluent_init(self): atom_str = signature[0] else: atom_str = str(self.task.L.get(signature[0])(*subterms)) - non_fluent_init_list += ['\t{} = true;'.format(atom_str)] + non_fluent_init_list += [f'\t{atom_str} = true;'] if len(non_fluent_init_list) == 0: return '' @@ -581,7 +587,7 @@ def get_state_fluent_init(self): term_str = signature[0] else: term_str = str(self.task.L.get(signature[0])(*subterms)) - init_list += ['\t{} = {};'.format(term_str, value)] + init_list += [f'\t{term_str} = {value};'] for signature, defs in self.task.x0.predicate_extensions.items(): if signature in self.non_fluent_signatures \ or signature in self.interm_signatures: @@ -592,7 +598,7 @@ def get_state_fluent_init(self): atom_str = signature[0] else: atom_str = str(self.task.L.get(signature[0])(*subterms)) - init_list += ['\t{} = true;'.format(atom_str)] + init_list += [f'\t{atom_str} = true;'] return '\n'.join(init_list) @@ -603,7 +609,7 @@ def rewrite(self, expr): re_st = [self.rewrite(st) for st in expr.subterms] if expr.symbol.builtin: if expr.symbol.symbol in symbol_map.keys(): - return '({} {} {})'.format(re_st[0], symbol_map[expr.symbol.symbol], re_st[1]) + return f'({re_st[0]} {symbol_map[expr.symbol.symbol]} {re_st[1]})' st_str = '' if expr.symbol.builtin: if expr.symbol.symbol in function_map.keys(): @@ -614,19 +620,19 @@ def rewrite(self, expr): st_str = '({})'.format(','.join(re_st)) else: st_str = '[{}]'.format(','.join(re_st)) - return '{}{}'.format(function_map[expr.symbol.symbol], st_str) + return 
f'{function_map[expr.symbol.symbol]}{st_str}' if len(re_st) > 0: st_str = '({})'.format(','.join(re_st)) - return '{}{}'.format(expr.symbol.signature[0], st_str) + return f'{expr.symbol.signature[0]}{st_str}' elif isinstance(expr, Atom): re_st = [self.rewrite(st) for st in expr.subterms] if expr.predicate.builtin: if expr.predicate.symbol in symbol_map.keys(): - return '({} {} {})'.format(re_st[0], symbol_map[expr.predicate.symbol], re_st[1]) + return f'({re_st[0]} {symbol_map[expr.predicate.symbol]} {re_st[1]})' st_str = '' if len(re_st) > 0: st_str = '({})'.format(','.join(re_st)) - return '{}{}'.format(expr.predicate.signature[0], st_str) + return f'{expr.predicate.signature[0]}{st_str}' elif isinstance(expr, Variable): # remove ? just in case return '?{}'.format(expr.symbol.replace('?', '')) @@ -636,7 +642,7 @@ def rewrite(self, expr): cond = self.rewrite(expr.condition) expr1 = self.rewrite(expr.subterms[0]) expr2 = self.rewrite(expr.subterms[1]) - return 'if ({}) then ({}) else ({})'.format(cond, expr1, expr2) + return f'if ({cond}) then ({expr1}) else ({expr2})' elif isinstance(expr, Tautology): return 'true' elif isinstance(expr, Contradiction): @@ -645,16 +651,16 @@ def rewrite(self, expr): re_sf = [self.rewrite(st) for st in expr.subformulas] re_sym = symbol_map[expr.connective] if len(re_sf) == 1: - return '{}{}'.format(re_sym, re_sf) - return '({} {} {})'.format(re_sf[0], re_sym, re_sf[1]) + return f'{re_sym}{re_sf}' + return f'({re_sf[0]} {re_sym} {re_sf[1]})' elif isinstance(expr, QuantifiedFormula): re_f = self.rewrite(expr.formula) - re_vars = ['?{} : {}'.format(x.symbol, x.sort.name) for x in expr.variables] + re_vars = [f'?{x.symbol} : {x.sort.name}' for x in expr.variables] re_sym = symbol_map[expr.quantifier] return '{}_{{{}}} ({})'.format(re_sym, ','.join(re_vars), re_f) elif isinstance(expr, AggregateCompoundTerm): re_expr = self.rewrite(expr.subterm) - re_vars = ['?{} : {}'.format(x.symbol, x.sort.name) for x in expr.bound_vars] + re_vars = 
[f'?{x.symbol} : {x.sort.name}' for x in expr.bound_vars] if expr.symbol == BFS.ADD: re_sym = 'sum' elif expr.symbol == BFS.MUL: @@ -681,7 +687,7 @@ def get_fluent(fl, next_state=False): prima = '' if next_state: prima = "'" - return "{}{}{}".format(head, prima, subterms_str) + return f"{head}{prima}{subterms_str}" def get_max_nondef_actions(self): return str(self.task.parameters.max_nondef_actions) diff --git a/src/tarski/io/sas/fd.py b/src/tarski/io/sas/fd.py index c5f3fc55..a32ed527 100644 --- a/src/tarski/io/sas/fd.py +++ b/src/tarski/io/sas/fd.py @@ -7,9 +7,8 @@ # SAS instance writer for Fast Downward pre-processor # ---------------------------------------------------------------------------------------------------------------------- -from tarski.syntax import symref, CompoundTerm from tarski.io.sas.templates import * - +from tarski.syntax import CompoundTerm, symref FAST_DOWNWARD_SAS_VERSION = 4 diff --git a/src/tarski/model.py b/src/tarski/model.py index f7c2b1df..4f2f4233 100644 --- a/src/tarski/model.py +++ b/src/tarski/model.py @@ -2,7 +2,7 @@ from typing import Union from . 
import errors as err -from .syntax import Function, Constant, CompoundTerm, symref, Variable +from .syntax import CompoundTerm, Constant, Function, symref from .syntax.predicate import Predicate @@ -101,7 +101,8 @@ def set(self, term: CompoundTerm, value: Union[Constant, int, float], *args): def add(self, predicate, *args): """ """ - from .syntax import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .syntax import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references if isinstance(predicate, Atom): args = predicate.subterms predicate = predicate.predicate @@ -141,7 +142,8 @@ def list_all_extensions(self): This list *unwraps* the TermReference's used internally in this class back into plain Tarski terms, so that you can rely on the returned extensions being made up of Constants, Variables, etc., not TermReferences """ - from .syntax.util import get_symbols # pylint: disable=import-outside-toplevel # Avoiding circular references + from .syntax.util import \ + get_symbols # pylint: disable=import-outside-toplevel # Avoiding circular references exts = {k: [unwrap_tuple(tup) for tup in ext] for k, ext in self.predicate_extensions.items()} exts.update((k, [unwrap_tuple(point) + (value, ) for point, value in ext.data.items()]) for k, ext in self.function_extensions.items()) diff --git a/src/tarski/modules.py b/src/tarski/modules.py index 251b9dc9..a4234d18 100644 --- a/src/tarski/modules.py +++ b/src/tarski/modules.py @@ -1,34 +1,42 @@ -""" A helper module to deal with import of packages that depend on the installation of certain pip extras, -to keep Tarski modular and lightweight for simple uses, but optionally heavyweight for more sophisticated uses. """ +""" +This module helps lazily importing some packages that depend on the installation of certain pip extras, +and that therefore we cannot import greedily. 
+""" -# TODO: Whenever we raise the Python requirement to Python >= 3.7, we should migrate this to a better -# interface providing direct access to the desired package, e.g. allowing "from tarski.modules import pyrddl", -# which can be easily achieved with the new module-level __getattr__ -# https://docs.python.org/3/reference/datamodel.html#customizing-module-attribute-access - -def import_scipy_special(): +def _import_scipy_special(): try: - import scipy.special as sci # pylint: disable=import-outside-toplevel + import scipy.special # pylint: disable=import-outside-toplevel + return scipy.special except ImportError: raise ImportError('The scipy module does not seem available. ' - 'Please try installing Tarski with the "arithmetic" extra.') from None - return sci + 'Install Tarski with the "arithmetic" extra: pip install "tarski[arithmetic]"') from None -def import_numpy(): +def _import_numpy(): try: - import numpy as np # pylint: disable=import-outside-toplevel + import numpy # pylint: disable=import-outside-toplevel + return numpy except ImportError: raise ImportError('The numpy module does not seem available. ' - 'Please try installing Tarski with the "arithmetic" extra.') from None - return np + 'Install Tarski with the "arithmetic" extra: pip install "tarski[arithmetic]"') from None -def import_pyrddl_parser(): +def _import_pyrddl_parser(): try: - from pyrddl.parser import RDDLParser # pylint: disable=import-outside-toplevel + from pyrddl.parser import \ + RDDLParser # pylint: disable=import-outside-toplevel + return RDDLParser except ImportError: raise ImportError('The pyrddl module does not seem available. 
' - 'Please try installing Tarski with the "rddl" extra.') from None - return RDDLParser + 'Install Tarski with the "rddl" extra: pip install "tarski[rddl]"') from None + + +def __getattr__(name, *args, **kwargs): + if name == 'RDDLParser': + return _import_pyrddl_parser() + elif name == 'numpy': + return _import_numpy() + elif name == 'scipy_special': + return _import_scipy_special() + raise ImportError(f'Module "{name}" is not available') diff --git a/src/tarski/ndl/temporal.py b/src/tarski/ndl/temporal.py index 7a9a90ac..ccc4edba 100644 --- a/src/tarski/ndl/temporal.py +++ b/src/tarski/ndl/temporal.py @@ -8,7 +8,8 @@ Proceedings of the 26th Int'l Joint Conference on Artificial Intelligence (IJCAI) 2017 """ -from ..syntax import Atom, CompoundTerm, CompoundFormula, Constant, symref, Connective, Tautology +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Connective, + Constant, Tautology, symref) class NDLSyntaxError(Exception): @@ -30,10 +31,10 @@ def __init__(self, **kwargs): self.td = kwargs['td'] self.r = kwargs['r'] if not isinstance(self.r, CompoundTerm): - raise NDLSyntaxError("NDL Syntactic Error: resource lock needs to be a term (given: {})".format(self.r)) + raise NDLSyntaxError(f"NDL Syntactic Error: resource lock needs to be a term (given: {self.r})") def __str__(self): - return "LOCK {} AFTER {} FOR {}".format(self.r, self.ts, self.td) + return f"LOCK {self.r} AFTER {self.ts} FOR {self.td}" class ResourceLevel: @@ -43,17 +44,17 @@ def __init__(self, **kwargs): self.td = kwargs['td'] self.r = kwargs['r'] if not isinstance(self.r, CompoundTerm): - raise NDLSyntaxError("NDL Syntactic Error: resource lock must refer to term (given: {})".format(self.r)) + raise NDLSyntaxError(f"NDL Syntactic Error: resource lock must refer to term (given: {self.r})") self.n = kwargs['n'] if not isinstance(self.n, Constant): - raise NDLSyntaxError("NDL Syntactic Error: resource level must be a constant (given: {}".format(self.n)) + raise NDLSyntaxError(f"NDL 
Syntactic Error: resource level must be a constant (given: {self.n}") if self.n.sort != self.r.sort: raise NDLSyntaxError( "NDL Type Mismatch: resource and level have different sorts (resource is: {}, level is: {}".format( self.r.sort, self.n.sort)) def __str__(self): - return "LOCK {} AFTER {} FOR {}".format(self.r, self.ts, self.td) + return f"LOCK {self.r} AFTER {self.ts} FOR {self.td}" class SetLiteralEffect: @@ -66,7 +67,7 @@ def __init__(self, lit, value): self.value = value def __str__(self): - return "SET({}, {})".format(self.lit, self.value) + return f"SET({self.lit}, {self.value})" class AssignValueEffect: @@ -79,7 +80,7 @@ def __init__(self, atom, value): self.value = value def __str__(self): - return "ASSIGN({}, {})".format(self.atom, self.value) + return f"ASSIGN({self.atom}, {self.value})" class UniversalEffect: @@ -92,7 +93,7 @@ def __init__(self, variable, effect): self.eff = effect def __str__(self): - return "FORALL({}, {})".format(self.var, self.effect) + return f"FORALL({self.var}, {self.effect})" class ConditionalEffect: @@ -106,7 +107,7 @@ def __init__(self, cond, then_eff, else_eff): self.else_eff = else_eff def __str__(self): - return "IF ({}) \nTHEN {}\n ELSE {}".format(self.condition, self.then_eff, self.else_eff) + return f"IF ({self.condition}) \nTHEN {self.then_eff}\n ELSE {self.else_eff}" class TimedEffect: @@ -119,7 +120,7 @@ def __init__(self, delay, eff): self.eff = eff def __str__(self): - return "AFTER {} APPLY {}".format(self.delay, self.eff) + return f"AFTER {self.delay} APPLY {self.eff}" class UnionExpression: @@ -190,13 +191,13 @@ def __init__(self, **kwargs): self.levels += [req] self.max_eff_time = max(self.max_eff_time, req.eff.td) else: - raise NDLSyntaxError("NDL syntax error: '{}' is not a resource lock or level request".format(req)) + raise NDLSyntaxError(f"NDL syntax error: '{req}' is not a resource lock or level request") # effects self.untimed_effects = [] self.timed_effects = [] for eff in kwargs['timed_effects']: 
if not isinstance(eff, TimedEffect): - raise NDLSyntaxError("NDL Syntax error: eff '{}' must be timed".format(eff)) + raise NDLSyntaxError(f"NDL Syntax error: eff '{eff}' must be timed") self.timed_effects += [eff] self.max_eff_time = max(self.max_eff_time, eff.delay) wrapped_effect = eff.eff @@ -205,7 +206,7 @@ def __init__(self, **kwargs): elif isinstance(wrapped_effect, SetLiteralEffect): self.effect_times[(symref(wrapped_effect.lit), wrapped_effect.value)] = eff.delay else: - raise NotImplementedError("Effects of type {} cannot be handled yet".format(type(wrapped_effect))) + raise NotImplementedError(f"Effects of type {type(wrapped_effect)} cannot be handled yet") for elem in kwargs['untimed_effects']: self.untimed_effects += [(0, elem)] diff --git a/src/tarski/rddl/__init__.py b/src/tarski/rddl/__init__.py index 280e6a2a..54a602d9 100644 --- a/src/tarski/rddl/__init__.py +++ b/src/tarski/rddl/__init__.py @@ -1 +1,3 @@ from .task import Task + +__all__ = ['Task'] diff --git a/src/tarski/rddl/task.py b/src/tarski/rddl/task.py index 50ce544c..b952d037 100644 --- a/src/tarski/rddl/task.py +++ b/src/tarski/rddl/task.py @@ -1,8 +1,7 @@ - +from ..evaluators.simple import evaluate from ..fol import FirstOrderLanguage from ..io import rddl from ..model import Model -from ..evaluators.simple import evaluate class Task: diff --git a/src/tarski/reachability/__init__.py b/src/tarski/reachability/__init__.py index 9b9907a8..80a17208 100644 --- a/src/tarski/reachability/__init__.py +++ b/src/tarski/reachability/__init__.py @@ -1,5 +1,4 @@ - from .asp import create_reachability_lp -from .clingo_wrapper import run_clingo, parse_model +from .clingo_wrapper import parse_model, run_clingo __all__ = ['create_reachability_lp', 'run_clingo', 'parse_model'] diff --git a/src/tarski/reachability/asp.py b/src/tarski/reachability/asp.py index 03d1871c..9a374b4e 100644 --- a/src/tarski/reachability/asp.py +++ b/src/tarski/reachability/asp.py @@ -3,15 +3,18 @@ """ import itertools +from 
..fstrips import (AddEffect, DelEffect, FunctionalEffect, Problem, + SingleEffect) from ..fstrips.action import AdditiveActionCost -from ..syntax.transform import remove_quantifiers, QuantifierEliminationMode +from ..fstrips.representation import (expand_universal_effect, + identify_cost_related_functions) +from ..syntax import (Atom, BuiltinPredicateSymbol, CompoundFormula, + CompoundTerm, Connective, Constant, Formula, + QuantifiedFormula, Quantifier, Tautology, Term, Variable) from ..syntax.builtins import symbol_complements from ..syntax.ops import free_variables -from ..syntax import Formula, Atom, CompoundFormula, Connective, Term, Variable, Constant, Tautology, \ - BuiltinPredicateSymbol, QuantifiedFormula, Quantifier, CompoundTerm -from ..syntax.sorts import parent, Interval -from ..fstrips import Problem, SingleEffect, AddEffect, DelEffect, FunctionalEffect -from ..fstrips.representation import identify_cost_related_functions, expand_universal_effect +from ..syntax.sorts import Interval, parent +from ..syntax.transform import QuantifierEliminationMode, remove_quantifiers GOAL = "goal" @@ -45,7 +48,7 @@ def __init__(self, problem: Problem, lp, include_variable_inequalities=False, in def gen_aux_atom(self, args=None): """ Return a new auxiliary atom with the given arguments """ self.aux_atom_count += 1 - return self.lp_atom("__f{}".format(self.aux_atom_count), args) + return self.lp_atom(f"__f{self.aux_atom_count}", args) def create(self): problem, lang, lp = self.problem, self.problem.language, self.lp @@ -127,7 +130,7 @@ def process_action(self, action, lang, lp): def process_action_cost(self, action, action_atom, parameters_types, lp): """ Process the increase-total-cost effect of the given action. This results in a LP atom of the form cost(action(X), 7) :- block(X). 
""" - used_varnames = set(make_variable_name(v.symbol) for v in action.parameters) + used_varnames = {make_variable_name(v.symbol) for v in action.parameters} if action.cost is None: lp.rule(f'cost({action_atom}, 1)', parameters_types) elif isinstance(action.cost, AdditiveActionCost): @@ -191,7 +194,7 @@ def process_formula(self, f: Formula): return [negate_lp_atom(processed)] else: - raise RuntimeError('Unexpected connective "{}" within CompoundFormula "{}"'.format(f.connective, f)) + raise RuntimeError(f'Unexpected connective "{f.connective}" within CompoundFormula "{f}"') elif isinstance(f, QuantifiedFormula): if f.quantifier == Quantifier.Exists: @@ -208,7 +211,7 @@ def process_formula(self, f: Formula): assert f.quantifier == Quantifier.Forall raise RuntimeError('Formula should be forall-free, revise source code') - raise RuntimeError('Unexpected formula "{}" with type "{}"'.format(f, type(f))) + raise RuntimeError(f'Unexpected formula "{f}" with type "{type(f)}"') @staticmethod def process_term(t: Term): @@ -220,7 +223,7 @@ def process_term(t: Term): elif isinstance(t, Constant): return str(t.symbol) - raise RuntimeError('Unexpected term "{}" with type "{}"'.format(t, type(t))) + raise RuntimeError(f'Unexpected term "{t}" with type "{type(t)}"') def process_effect(self, lang, eff, action_name): """ Process a given effect and return the corresponding LP rule (a pair with head and body). 
For instance a @@ -301,9 +304,9 @@ def __init__(self, symbol: str, args=None, infix=False): def __str__(self): """ Return a string of the form 'symbol(arg1, ..., argn)', or 'symbol()', if args is empty """ if self.infix: - return "{} {} {}".format(self.args[0], self.symbol, self.args[1]) + return f"{self.args[0]} {self.symbol} {self.args[1]}" arglist = ", ".join(str(arg) for arg in _ensure_list(self.args)) - return "{}({})".format(self.symbol, arglist) + return f"{self.symbol}({arglist})" __repr__ = __str__ @@ -400,7 +403,7 @@ def rule(self, head, body=None): def _print_rule(head, body): assert body is None or isinstance(body, (list, tuple)) - return "{}.".format(head) if body is None else "{} :- {}.".format(head, _print_body(body)) + return f"{head}." if body is None else f"{head} :- {_print_body(body)}." def _print_body(body): @@ -415,7 +418,7 @@ def sanitize(name: str): def _var(i=0): """ Return a distinct variable name for each given value of i """ alphabet = "XYZABCDEFGHIJKLMNOPQRSTUVW" - return alphabet[i] if i < len(alphabet) else "X{}".format(i) + return alphabet[i] if i < len(alphabet) else f"X{i}" def generate_varname(avoid=None): diff --git a/src/tarski/reachability/clingo_wrapper.py b/src/tarski/reachability/clingo_wrapper.py index f19277cb..43ea7323 100644 --- a/src/tarski/reachability/clingo_wrapper.py +++ b/src/tarski/reachability/clingo_wrapper.py @@ -1,14 +1,15 @@ import logging import os -import sys import shutil +import sys import tempfile -from pathlib import Path from collections import defaultdict +from importlib.util import find_spec +from pathlib import Path -from ..errors import CommandNotFoundError, ExternalCommandError, OutOfMemoryError, OutOfTimeError, ArgumentError +from ..errors import (ArgumentError, CommandNotFoundError, + ExternalCommandError, OutOfMemoryError, OutOfTimeError) from ..utils import command as cmd -from importlib.util import find_spec def get_gringo_command(): @@ -24,7 +25,7 @@ def get_gringo_command(): else: gringo = 
shutil.which("gringo") command = [gringo] if gringo else None - logging.debug('Using gringo binary found in "{}"'.format(gringo)) + logging.debug(f'Using gringo binary found in "{gringo}"') return command @@ -51,7 +52,7 @@ def run_clingo(lp): return model_filename, theory_filename if os.path.isfile(stderr.name): - with open(stderr.name, 'r', encoding='utf8') as file: + with open(stderr.name, encoding='utf8') as file: errlog = file.read() if 'std::bad_alloc' in errlog: @@ -64,7 +65,7 @@ def run_clingo(lp): def parse_model(*, filename=None, content=None, symbol_mapping): if filename and not content: - with open(filename, "r", encoding='utf8') as f: + with open(filename, encoding='utf8') as f: return _parse_model(f, symbol_mapping) elif content and not filename: return _parse_model(content.splitlines(), symbol_mapping) diff --git a/src/tarski/reachability/gringo.py b/src/tarski/reachability/gringo.py index 9a7d1475..c6fe4f0f 100644 --- a/src/tarski/reachability/gringo.py +++ b/src/tarski/reachability/gringo.py @@ -1,5 +1,8 @@ #!/usr/bin/env python3 import sys +from typing import Sequence + +from clingo import Control # type: ignore from clingo.application import Application, clingo_main # type: ignore @@ -10,17 +13,17 @@ class WrapperClingo(Application): def __init__(self, name): self.program_name = name - def main(self, ctl, files): + def main(self, control: Control, files: Sequence[str]) -> None: """ The default implementation from clingo documentation Note- main(...) 
must be implemented """ for f in files: - ctl.load(f) + control.load(f) if not files: - ctl.load("-") - ctl.ground([("base", [])]) - ctl.solve() + control.load("-") + control.ground([("base", [])]) + control.solve() # run the clingo application in the default gringo mode diff --git a/src/tarski/sas/__init__.py b/src/tarski/sas/__init__.py index ca86ac1f..f383facb 100644 --- a/src/tarski/sas/__init__.py +++ b/src/tarski/sas/__init__.py @@ -9,7 +9,7 @@ from collections import namedtuple - Schema = namedtuple('Schema', ['name', 'variables', 'constraints', 'transitions']) Action = namedtuple('Action', ['name', 'arguments', 'transitions']) +__all__ = ['Schema', 'Action'] diff --git a/src/tarski/sas/util.py b/src/tarski/sas/util.py index 7936c526..30be08f6 100644 --- a/src/tarski/sas/util.py +++ b/src/tarski/sas/util.py @@ -7,14 +7,15 @@ # Utility method to process SAS # ---------------------------------------------------------------------------------------------------------------------- from itertools import product + import tarski.model from tarski.evaluators.simple import evaluate -from tarski.syntax import symref -from tarski.theories import Theory -from tarski.syntax.transform.substitutions import substitute_expression, create_substitution - from tarski.sas import Action from tarski.sas.temporal import TemporalAction +from tarski.syntax import symref +from tarski.syntax.transform.substitutions import (create_substitution, + substitute_expression) +from tarski.theories import Theory def check_constraints(C, s, subst): diff --git a/src/tarski/search/__init__.py b/src/tarski/search/__init__.py index 7181fb4e..871bc0f3 100644 --- a/src/tarski/search/__init__.py +++ b/src/tarski/search/__init__.py @@ -1,3 +1,4 @@ - -from .model import SearchModel, GroundForwardSearchModel from .blind import BreadthFirstSearch +from .model import GroundForwardSearchModel, SearchModel + +__all__ = ['SearchModel', 'GroundForwardSearchModel', 'BreadthFirstSearch'] diff --git 
a/src/tarski/search/model.py b/src/tarski/search/model.py index 6ad2e840..38d24848 100644 --- a/src/tarski/search/model.py +++ b/src/tarski/search/model.py @@ -1,6 +1,5 @@ - -from .operations import is_applicable, progress from ..evaluators.simple import evaluate +from .operations import is_applicable, progress class SearchModel: diff --git a/src/tarski/search/operations.py b/src/tarski/search/operations.py index 58f5336d..973f263f 100644 --- a/src/tarski/search/operations.py +++ b/src/tarski/search/operations.py @@ -1,7 +1,7 @@ import copy -from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect from ..evaluators.simple import evaluate +from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect from ..fstrips.representation import substitute_expression from ..syntax.transform.substitutions import enumerate_substitutions diff --git a/src/tarski/syntax/__init__.py b/src/tarski/syntax/__init__.py index 2db60cfc..9bb4cb1a 100644 --- a/src/tarski/syntax/__init__.py +++ b/src/tarski/syntax/__init__.py @@ -1,12 +1,56 @@ - +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .formulas import (Atom, CompoundFormula, Connective, Contradiction, + Formula, QuantifiedFormula, Quantifier, Tautology, + VariableBinding, bot, equiv, exists, forall, implies, + is_and, is_neg, is_or, land, lor, neg, top) from .function import Function from .predicate import Predicate -from .sorts import Sort, Interval, inclusion_closure -from .terms import Term, Constant, Variable, CompoundTerm, IfThenElse, ite, AggregateCompoundTerm -from .util import termlists_are_equal, termlist_hash -from .formulas import land, lor, neg, implies, forall, exists, equiv, Connective, Atom, Formula,\ - CompoundFormula, QuantifiedFormula, Tautology, Contradiction, top, bot, Quantifier, VariableBinding, \ - is_neg, is_and, is_or -from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .sorts import Interval, Sort, inclusion_closure from 
.symrefs import symref +from .terms import (AggregateCompoundTerm, CompoundTerm, Constant, IfThenElse, + Term, Variable, ite) from .transform.substitutions import create_substitution, substitute_expression +from .util import termlist_hash, termlists_are_equal + +__all__ = [ + 'AggregateCompoundTerm', + 'Atom', + 'BuiltinFunctionSymbol', + 'BuiltinPredicateSymbol', + 'CompoundFormula', + 'CompoundTerm', + 'Connective', + 'Constant', + 'Contradiction', + 'Formula', + 'Function', + 'IfThenElse', + 'Interval', + 'Predicate', + 'QuantifiedFormula', + 'Quantifier', + 'Sort', + 'Tautology', + 'Term', + 'Variable', + 'VariableBinding', + 'bot', + 'create_substitution', + 'equiv', + 'exists', + 'forall', + 'implies', + 'inclusion_closure', + 'is_and', + 'is_neg', + 'is_or', + 'ite', + 'land', + 'lor', + 'neg', + 'substitute_expression', + 'symref', + 'termlist_hash', + 'termlists_are_equal', + 'top', +] diff --git a/src/tarski/syntax/algebra/__init__.py b/src/tarski/syntax/algebra/__init__.py index 4200d222..bc589b5a 100644 --- a/src/tarski/syntax/algebra/__init__.py +++ b/src/tarski/syntax/algebra/__init__.py @@ -1 +1,3 @@ from .matrix import Matrix + +__all__ = ['Matrix'] diff --git a/src/tarski/syntax/algebra/matrix.py b/src/tarski/syntax/algebra/matrix.py index bd8ee68f..a13209b7 100644 --- a/src/tarski/syntax/algebra/matrix.py +++ b/src/tarski/syntax/algebra/matrix.py @@ -1,12 +1,12 @@ +from ... import errors as err from ... import modules -from ...syntax import Term, Constant +from ...syntax import Constant, Term from ...syntax.sorts import Sort -from ... 
import errors as err class Matrix(Term): def __init__(self, arraylike, sort: Sort): - np = modules.import_numpy() + np = modules.numpy self.matrix = np.array(arraylike, dtype=np.dtype(object)) self._sort = sort # verify and cast @@ -43,7 +43,7 @@ def __getitem__(self, arg): return self.matrix[i, j] def __str__(self): - return '{}'.format(self.matrix) + return f'{self.matrix}' __repr__ = __str__ diff --git a/src/tarski/syntax/arithmetic/__init__.py b/src/tarski/syntax/arithmetic/__init__.py index 3f95d88e..38568274 100644 --- a/src/tarski/syntax/arithmetic/__init__.py +++ b/src/tarski/syntax/arithmetic/__init__.py @@ -1,15 +1,16 @@ # pylint: disable=redefined-builtin -import itertools import copy +import itertools -from ..transform.substitutions import substitute_expression -from ...syntax import Term, AggregateCompoundTerm, CompoundTerm, Constant, Variable, IfThenElse, create_substitution -from ...syntax.algebra import Matrix from ... import errors as err -from ... grounding.naive import instantiation -from ..builtins import BuiltinFunctionSymbol, get_arithmetic_binary_functions from ... 
import modules +from ...grounding.naive import instantiation +from ...syntax import (AggregateCompoundTerm, CompoundTerm, Constant, + IfThenElse, Term, Variable, create_substitution) +from ...syntax.algebra import Matrix +from ..builtins import BuiltinFunctionSymbol, get_arithmetic_binary_functions +from ..transform.substitutions import substitute_expression def sumterm(*args): @@ -32,15 +33,12 @@ def prodterm(*args): variables = args[:-1] expr = args[-1] if len(variables) < 1: - raise err.SyntacticError(msg='sumterm(x0,x1,...,xn,expr) requires at least one\ - bound variable, arguments: {}'.format(args)) + raise err.SyntacticError(f'prod(x0,x1,...,xn,expr) requires at least one bound variable, arguments: {args}') for x in variables: if not isinstance(x, Variable): - raise err.SyntacticError(msg='sum(x0,...,xn,expr) require each\ - argument xi to be an instance of Variable') + raise err.SyntacticError('prod(x0,...,xn,expr) requires each argument xi to be an instance of Variable') if not isinstance(expr, Term): - raise err.SyntacticError(msg='sum(x0,x1,...,xn,expr) requires last \ - argument "expr" to be an instance of Term, got "{}"'.format(expr)) + raise err.SyntacticError(f'prod(x0,x1,...,xn,expr) requires "expr" to be a Term, got "{expr}"') return AggregateCompoundTerm(BuiltinFunctionSymbol.MUL, variables, expr) @@ -152,7 +150,6 @@ def one(sort): def simplify(expr: Term) -> Term: - np = modules.import_numpy() if isinstance(expr, Constant): return expr elif isinstance(expr, Variable): @@ -205,7 +202,7 @@ def simplify(expr: Term) -> Term: return one(expr.sort) expr.subterms = (simplified,) return expr - elif isinstance(expr, (Matrix, np.ndarray)): + elif isinstance(expr, (Matrix, modules.numpy.ndarray)): N, M = expr.shape for i in range(N): for j in range(M): @@ -218,4 +215,4 @@ def simplify(expr: Term) -> Term: expr.subterms = (simplify(expr.subterms[0]), simplify(expr.subterms[1])) return expr - raise NotImplementedError("Can't handle expression {} 
yet".format(expr)) + raise NotImplementedError(f"Can't handle expression {expr} yet") diff --git a/src/tarski/syntax/arithmetic/random.py b/src/tarski/syntax/arithmetic/random.py index 6b508dae..e7fdd794 100644 --- a/src/tarski/syntax/arithmetic/random.py +++ b/src/tarski/syntax/arithmetic/random.py @@ -1,6 +1,5 @@ - -from ..builtins import BuiltinFunctionSymbol as bfs from ... import modules +from ..builtins import BuiltinFunctionSymbol as bfs def normal(mu, sigma): @@ -10,8 +9,7 @@ def normal(mu, sigma): try: normal_func = mu.language.get_function(bfs.NORMAL) except AttributeError: - np = modules.import_numpy() - return np.random.normal(mu, sigma) + return modules.numpy.random.normal(mu, sigma) return normal_func(mu, sigma) @@ -22,6 +20,5 @@ def gamma(shape, scale): try: gamma_func = scale.language.get_function(bfs.GAMMA) except AttributeError: - np = modules.import_numpy() - return np.random.gamma(shape, scale) + return modules.numpy.random.gamma(shape, scale) return gamma_func(shape, scale) diff --git a/src/tarski/syntax/builtins.py b/src/tarski/syntax/builtins.py index b2cd6828..935ecd8d 100644 --- a/src/tarski/syntax/builtins.py +++ b/src/tarski/syntax/builtins.py @@ -58,7 +58,8 @@ def is_builtin_predicate(predicate): def create_atom(lang, symbol: BuiltinPredicateSymbol, lhs, rhs): - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references predicate = lang.get_predicate(symbol) return Atom(predicate, [lhs, rhs]) @@ -67,7 +68,8 @@ def negate_builtin_atom(atom): """ Given an atom based on a built-in predicate, return an equivalent atom with the negation absorbed. If the atom is not based on a built-in predicate, return the atom unchanged. 
""" - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references if isinstance(atom, Atom) and atom.predicate.builtin: pred = atom.predicate return create_atom(pred.language, pred.symbol.complement(), *atom.subterms) diff --git a/src/tarski/syntax/factory.py b/src/tarski/syntax/factory.py index bc10c86b..0c8c71ac 100644 --- a/src/tarski/syntax/factory.py +++ b/src/tarski/syntax/factory.py @@ -1,7 +1,7 @@ from .. import errors as err +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol from .formulas import Atom from .terms import Term -from .builtins import BuiltinPredicateSymbol, BuiltinFunctionSymbol def check_same_language(lhs, rhs): diff --git a/src/tarski/syntax/formulas.py b/src/tarski/syntax/formulas.py index df0d5f31..cfa3994b 100644 --- a/src/tarski/syntax/formulas.py +++ b/src/tarski/syntax/formulas.py @@ -4,9 +4,9 @@ from .. 
import errors as err from .builtins import BuiltinPredicateSymbol -from .terms import Variable, Term -from .util import termlists_are_equal, termlist_hash from .predicate import Predicate +from .terms import Term, Variable +from .util import termlist_hash, termlists_are_equal class Connective(Enum): @@ -102,7 +102,7 @@ def __init__(self, connective, subformulas): def _check_well_formed(self): if any(not isinstance(f, Formula) for f in self.subformulas): - raise err.LanguageError("Wrong argument types for compound formula: '{}' ".format(self.subformulas)) + raise err.LanguageError(f"Wrong argument types for compound formula: '{self.subformulas}' ") if self.connective == Connective.Not: if len(self.subformulas) != 1: @@ -113,10 +113,10 @@ def _check_well_formed(self): def __str__(self): if self.connective == Connective.Not: assert len(self.subformulas) == 1 - return "({} {})".format(self.connective, str(self.subformulas[0])) + return f"({self.connective} {str(self.subformulas[0])})" - inner = " {} ".format(self.connective).join(str(f) for f in self.subformulas) - return "({})".format(inner) + inner = f" {self.connective} ".join(str(f) for f in self.subformulas) + return f"({inner})" __repr__ = __str__ @@ -148,7 +148,7 @@ def _check_well_formed(self): def __str__(self): vars_ = ', '.join(str(x) for x in self.variables) - return '{} {} : ({})'.format(self.quantifier, vars_, self.formula) + return f'{self.quantifier} {vars_} : ({self.formula})' __repr__ = __str__ @@ -244,10 +244,10 @@ def quantified(quantifier, *args): variables, formula = args[:-1], args[-1] if not isinstance(formula, Formula): - raise err.LanguageError('Illformed arguments for quantified formula: {}'.format(args)) + raise err.LanguageError(f'Illformed arguments for quantified formula: {args}') if not all(isinstance(x, Variable) for x in variables): - raise err.LanguageError('Illformed arguments for quantified formula: {}'.format(args)) + raise err.LanguageError(f'Illformed arguments for quantified 
formula: {args}') return QuantifiedFormula(quantifier, variables, args[-1]) @@ -308,7 +308,7 @@ def _check_well_formed(self): head = self.predicate if not isinstance(head, Predicate): - raise err.LanguageError("Incorrect atom head: '{}' ".format(head)) + raise err.LanguageError(f"Incorrect atom head: '{head}' ") # Check arities match if len(self.subterms) != self.predicate.arity: @@ -319,7 +319,7 @@ def _check_well_formed(self): # Check arguments are all terms of the appropriate type and matching language for arg, expected_sort in zip(self.subterms, head.sort): if not isinstance(arg, Term): - raise err.LanguageError("Wrong argument for atomic formula: '{}' ".format(arg)) + raise err.LanguageError(f"Wrong argument for atomic formula: '{arg}' ") if arg.language != language: raise err.LanguageMismatch(arg, arg.language, language) diff --git a/src/tarski/syntax/function.py b/src/tarski/syntax/function.py index 4159f38d..f03fcb52 100644 --- a/src/tarski/syntax/function.py +++ b/src/tarski/syntax/function.py @@ -1,4 +1,3 @@ - from ..errors import LanguageError, LanguageMismatch from .sorts import Sort @@ -55,5 +54,6 @@ def __str__(self): __repr__ = __str__ def __call__(self, *args): - from .terms import CompoundTerm # pylint: disable=import-outside-toplevel # Avoiding circular references + from .terms import \ + CompoundTerm # pylint: disable=import-outside-toplevel # Avoiding circular references return CompoundTerm(self, args) diff --git a/src/tarski/syntax/ops.py b/src/tarski/syntax/ops.py index 0aa3ee41..4611e55e 100644 --- a/src/tarski/syntax/ops.py +++ b/src/tarski/syntax/ops.py @@ -1,12 +1,12 @@ import itertools -from .walker import FOLWalker from .. 
import modules -from .sorts import children, compute_direct_sort_map, Interval -from .visitors import CollectFreeVariables -from .terms import Term, Constant, Variable, CompoundTerm from .formulas import CompoundFormula, Connective +from .sorts import Interval, children, compute_direct_sort_map from .symrefs import symref +from .terms import CompoundTerm, Constant, Term, Variable +from .visitors import CollectFreeVariables +from .walker import FOLWalker def cast_to_closest_common_numeric_ancestor(lang, lhs, rhs): @@ -17,7 +17,7 @@ def cast_to_closest_common_numeric_ancestor(lang, lhs, rhs): if isinstance(lhs, Term) and isinstance(rhs, Term): return lhs, rhs - np = modules.import_numpy() + np = modules.numpy if isinstance(lhs, Term): if isinstance(rhs, np.ndarray): # lhs is scalar, rhs is matrix return lhs.language.matrix([[lhs]], lhs.sort), rhs diff --git a/src/tarski/syntax/predicate.py b/src/tarski/syntax/predicate.py index a3a92f2d..4d50fe3c 100644 --- a/src/tarski/syntax/predicate.py +++ b/src/tarski/syntax/predicate.py @@ -1,4 +1,3 @@ - from ..errors import LanguageError, LanguageMismatch from .sorts import Sort @@ -52,5 +51,6 @@ def __str__(self): __repr__ = __str__ def __call__(self, *args): - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references return Atom(self, args) diff --git a/src/tarski/syntax/sorts.py b/src/tarski/syntax/sorts.py index 28877bd9..dc6cf370 100644 --- a/src/tarski/syntax/sorts.py +++ b/src/tarski/syntax/sorts.py @@ -16,7 +16,7 @@ def __init__(self, name, language, builtin=False): self.builtin = builtin def __str__(self): - return 'Sort({})'.format(self.name) + return f'Sort({self.name})' __repr__ = __str__ @@ -52,7 +52,8 @@ def cast(self, x): def to_constant(self, x): """ Cast the given element to a constant of this sort. """ - from . 
import Constant, Variable # pylint: disable=import-outside-toplevel # Avoiding circular references + from . import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Constant, Variable) if isinstance(x, (Constant, Variable)) and x.sort == self: return x if x not in self._domain: @@ -111,12 +112,13 @@ def cast(self, x): # pass y = self.encode(x) # can raise ValueError if not self.is_within_bounds(y): - raise ValueError("Cast: Symbol '{}' (encoded '{}') outside of defined interval bounds".format(x, y)) + raise ValueError(f"Cast: Symbol '{x}' (encoded '{y}') outside of defined interval bounds") return y def to_constant(self, x): """ Cast the given element to a constant of this sort. """ - from . import Constant, Variable # pylint: disable=import-outside-toplevel # Avoiding circular references + from . import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Constant, Variable) if isinstance(x, (Constant, Variable)) and x.sort == self: return x return Constant(self.cast(x), self) @@ -157,7 +159,8 @@ def dump(self): def domain(self): if self.builtin or self.upper_bound - self.lower_bound > 9999: # Yes, very hacky raise err.TarskiError(f'Cannot iterate over interval with range [{self.lower_bound}, {self.upper_bound}]') - from . import Constant # pylint: disable=import-outside-toplevel # Avoiding circular references + from . import \ + Constant # pylint: disable=import-outside-toplevel # Avoiding circular references return (Constant(x, self) for x in range(self.lower_bound, self.upper_bound + 1)) @@ -238,8 +241,7 @@ def compute_signature_bindings(signature): """ Return an exhaustive list of all possible bindings compatible with the given signature, i.e. list of sorts. 
""" domains = [s.domain() for s in signature] - for binding in itertools.product(*domains): - yield binding + yield from itertools.product(*domains) def compute_direct_sort_map(lang): diff --git a/src/tarski/syntax/symrefs.py b/src/tarski/syntax/symrefs.py index 7f6f814d..dacc41f3 100644 --- a/src/tarski/syntax/symrefs.py +++ b/src/tarski/syntax/symrefs.py @@ -1,4 +1,3 @@ - from .formulas import Formula from .terms import Term @@ -29,6 +28,6 @@ def __eq__(self, other): return self.__class__ is other.__class__ and self.expr.is_syntactically_equal(other.expr) def __str__(self): - return "symref[{}]".format(self.expr) + return f"symref[{self.expr}]" __repr__ = __str__ diff --git a/src/tarski/syntax/temporal/ltl.py b/src/tarski/syntax/temporal/ltl.py index 5cc5a546..bc7e870e 100644 --- a/src/tarski/syntax/temporal/ltl.py +++ b/src/tarski/syntax/temporal/ltl.py @@ -1,7 +1,7 @@ - from enum import Enum + from ... import errors as err -from ..formulas import Formula, Connective, CompoundFormula, lor +from ..formulas import CompoundFormula, Connective, Formula, lor class TemporalConnective(Enum): @@ -18,23 +18,23 @@ def __init(self, conn, sub): def _check_well_formed(self): if any(not isinstance(f, Formula) for f in self.subformulas): - raise err.LanguageError("Wrong argument types for compound formula: '{}' ".format(self.subformulas)) + raise err.LanguageError(f"Wrong argument types for compound formula: '{self.subformulas}' ") if self.connective == Connective.Not or \ self.connective in (TemporalConnective.X, TemporalConnective.F, TemporalConnective.G): if len(self.subformulas) != 1: - raise err.LanguageError("{} admits only one subformula".format(str(self.connective))) + raise err.LanguageError(f"{str(self.connective)} admits only one subformula") elif len(self.subformulas) < 2: - raise err.LanguageError("{} requires at least two subformulas".format(str(self.connective))) + raise err.LanguageError(f"{str(self.connective)} requires at least two subformulas") def 
__str__(self): if self.connective == Connective.Not or \ self.connective in (TemporalConnective.X, TemporalConnective.F, TemporalConnective.G): assert len(self.subformulas) == 1 - return "{} ({})".format(self.connective, str(self.subformulas[0])) + return f"{self.connective} ({str(self.subformulas[0])})" - inner = " {} ".format(self.connective).join(str(f) for f in self.subformulas) - return "({})".format(inner) + inner = f" {self.connective} ".join(str(f) for f in self.subformulas) + return f"({inner})" def X(arg): diff --git a/src/tarski/syntax/terms.py b/src/tarski/syntax/terms.py index 85d15b0c..85dbb036 100644 --- a/src/tarski/syntax/terms.py +++ b/src/tarski/syntax/terms.py @@ -1,9 +1,9 @@ from typing import Tuple -from .util import termlists_are_equal, termlist_hash -from .sorts import Sort, parent, Interval from .. import errors as err -from .builtins import BuiltinPredicateSymbol, BuiltinFunctionSymbol +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .sorts import Interval, Sort, parent +from .util import termlist_hash, termlists_are_equal class Term: @@ -152,7 +152,7 @@ def __str__(self): return str(self.symbol) def __repr__(self): - return '{} ({})'.format(self.symbol, self.sort.name) + return f'{self.symbol} ({self.sort.name})' def hash(self): return hash((self.symbol, self.sort.name)) @@ -197,7 +197,8 @@ def sort(self): return self.symbol.codomain def __str__(self): - return '{}({})'.format(self.symbol.symbol, ', '.join(str(t) for t in self.subterms)) + args = ', '.join(str(t) for t in self.subterms) + return f'{self.symbol.symbol}({args})' __repr__ = __str__ @@ -258,20 +259,21 @@ def __init__(self, condition, subterms: Tuple[Term, Term]): if len(subterms) != 2: raise err.ArityMismatch('IfThenElse', subterms, msg='IfThenElse term needs exactly two sub terms') - self.symbol = subterms[0].language.get('ite') + left, right = subterms + + self.symbol = left.language.get('ite') self.condition = condition # Our implementation of ite 
requires both branches to have equal sort - if subterms[0].sort != subterms[1].sort: - if parent(subterms[0].sort) == subterms[1].sort: - self._sort = subterms[1].sort - elif parent(subterms[1].sort) == subterms[0].sort: - self._sort = subterms[0].sort + if left.sort != right.sort: + if parent(left.sort) == right.sort: + self._sort = right.sort + elif parent(right.sort) == left.sort: + self._sort = left.sort else: - raise err.SyntacticError( - msg='IfThenElse: both subterms need to be of the same sort! lhs: "{}"({}), rhs: "{}"({})'.format( - subterms[0], subterms[0].sort, subterms[1], subterms[1].sort)) + raise err.SyntacticError('IfThenElse: Mismatching subterm sort. ' + f'lhs: "{left}"({left.sort}), rhs: "{right}"({right.sort})') else: - self._sort = subterms[0].sort + self._sort = left.sort self.subterms = tuple(subterms) @@ -333,7 +335,7 @@ def __str__(self): return str(self.name) def __repr__(self): - return '{} ({})'.format(self.name, self.sort.name) + return f'{self.name} ({self.sort.name})' def hash(self): return hash(self.signature) diff --git a/src/tarski/syntax/transform/__init__.py b/src/tarski/syntax/transform/__init__.py index 1bd491ca..10a97565 100644 --- a/src/tarski/syntax/transform/__init__.py +++ b/src/tarski/syntax/transform/__init__.py @@ -1,6 +1,22 @@ - -from .nnf import NNFTransformation, to_negation_normal_form from .cnf import CNFTransformation, to_conjunctive_normal_form -from .prenex import PrenexTransformation, to_prenex_negation_normal_form -from .quantifier_elimination import QuantifierElimination, QuantifierEliminationMode, remove_quantifiers from .neg_builtin import NegatedBuiltinAbsorption +from .nnf import NNFTransformation, to_negation_normal_form +from .prenex import PrenexTransformation, to_prenex_negation_normal_form +from .quantifier_elimination import (QuantifierElimination, + QuantifierEliminationMode, + remove_quantifiers) + +__all__ = [ + 'CNFTransformation', + 'NNFTransformation', + 'NegatedBuiltinAbsorption', + 
'PrenexTransformation', + 'QuantifierElimination', + 'QuantifierEliminationMode', + 'errors', + 'remove_quantifiers', + 'substitutions', + 'to_conjunctive_normal_form', + 'to_negation_normal_form', + 'to_prenex_negation_normal_form' +] diff --git a/src/tarski/syntax/transform/action_grounding.py b/src/tarski/syntax/transform/action_grounding.py index 09b14900..52156c5b 100644 --- a/src/tarski/syntax/transform/action_grounding.py +++ b/src/tarski/syntax/transform/action_grounding.py @@ -1,6 +1,6 @@ -from ...fstrips.representation import substitute_expression -from ...syntax import symref, Constant, create_substitution, VariableBinding from ...fstrips.action import Action, PlainOperator +from ...fstrips.representation import substitute_expression +from ...syntax import Constant, VariableBinding, create_substitution, symref def ground_schema_into_plain_operator(action: Action, substitution): diff --git a/src/tarski/syntax/transform/cnf.py b/src/tarski/syntax/transform/cnf.py index 487892a3..3d6e1ab4 100644 --- a/src/tarski/syntax/transform/cnf.py +++ b/src/tarski/syntax/transform/cnf.py @@ -1,9 +1,8 @@ """ CNF Transformation """ +from .nnf import to_negation_normal_form from ..formulas import CompoundFormula, Connective, QuantifiedFormula -from ..transform import to_negation_normal_form - from .errors import TransformationError diff --git a/src/tarski/syntax/transform/neg_builtin.py b/src/tarski/syntax/transform/neg_builtin.py index c532acf9..a322fffe 100644 --- a/src/tarski/syntax/transform/neg_builtin.py +++ b/src/tarski/syntax/transform/neg_builtin.py @@ -3,8 +3,8 @@ """ import copy -from ..formulas import Connective, Atom, QuantifiedFormula, CompoundFormula from ..builtins import negate_builtin_atom +from ..formulas import Atom, CompoundFormula, Connective, QuantifiedFormula class NegatedBuiltinAbsorption: diff --git a/src/tarski/syntax/transform/nnf.py b/src/tarski/syntax/transform/nnf.py index 945dff29..97efd119 100644 --- a/src/tarski/syntax/transform/nnf.py 
+++ b/src/tarski/syntax/transform/nnf.py @@ -4,8 +4,9 @@ import copy from ... import errors as err -from ..formulas import neg, Formula, QuantifiedFormula, CompoundFormula, Connective, negate_quantifier, Tautology, \ - Contradiction, Atom +from ..formulas import (Atom, CompoundFormula, Connective, Contradiction, + Formula, QuantifiedFormula, Tautology, neg, + negate_quantifier) class NNFTransformation: diff --git a/src/tarski/syntax/transform/prenex.py b/src/tarski/syntax/transform/prenex.py index 37b6b2e5..5781f3f6 100644 --- a/src/tarski/syntax/transform/prenex.py +++ b/src/tarski/syntax/transform/prenex.py @@ -1,12 +1,12 @@ """ Rewrite formulas into prenex negation normal form """ -from .substitutions import substitute_expression +from ..formulas import (CompoundFormula, Connective, QuantifiedFormula, + Quantifier, lor) from ..symrefs import symref -from ..formulas import CompoundFormula, QuantifiedFormula, Connective, Quantifier, lor from ..transform.nnf import NNFTransformation - from .errors import TransformationError +from .substitutions import substitute_expression class PrenexTransformation: @@ -28,7 +28,7 @@ def _merge_quantified_subformulas(self, lhs, rhs, renaming=True): new_variables[key_y] = y else: if renaming: - y2 = self.L.variable("{}'".format(y.symbol), y.sort) + y2 = self.L.variable(f"{y.symbol}'", y.sort) subst[y] = y2 new_variables[(y2.symbol, y2.sort.name)] = y2 if len(subst) > 0: @@ -49,7 +49,7 @@ def _nest_quantifiers(self, out_q, out_vars, out_phi, inner_q, inner_vars, conn, if key_y not in in_vars_dict: new_out_vars.append(y) else: - y2 = self.L.variable("{}'".format(y.symbol), y.sort) + y2 = self.L.variable(f"{y.symbol}'", y.sort) subst[symref(y)] = y2 new_out_vars.append(y2) if len(subst) > 0: diff --git a/src/tarski/syntax/transform/quantifier_elimination.py b/src/tarski/syntax/transform/quantifier_elimination.py index f79ee892..156769c9 100644 --- a/src/tarski/syntax/transform/quantifier_elimination.py +++ 
b/src/tarski/syntax/transform/quantifier_elimination.py @@ -6,9 +6,10 @@ from enum import Enum from ... import errors as err -from .substitutions import create_substitution, substitute_expression -from ..formulas import land, lor, Quantifier, QuantifiedFormula, Atom, Tautology, Contradiction, CompoundFormula +from ..formulas import (Atom, CompoundFormula, Contradiction, + QuantifiedFormula, Quantifier, Tautology, land, lor) from .errors import TransformationError +from .substitutions import create_substitution, substitute_expression class QuantifierEliminationMode(Enum): @@ -59,7 +60,8 @@ def _recurse(self, phi): def _expand(self, phi: QuantifiedFormula, creator): # Avoiding circular references in the import: - from ...grounding.naive import instantiation # pylint: disable=import-outside-toplevel + from ...grounding.naive import \ + instantiation # pylint: disable=import-outside-toplevel card, syms, substs = instantiation.enumerate_groundings(phi.variables) if card == 0: raise TransformationError("quantifier elimination", phi, "No constants were defined!") diff --git a/src/tarski/syntax/transform/simplifications.py b/src/tarski/syntax/transform/simplifications.py index 88280dcf..a861df4d 100644 --- a/src/tarski/syntax/transform/simplifications.py +++ b/src/tarski/syntax/transform/simplifications.py @@ -1,7 +1,5 @@ - -from ...syntax import Atom, CompoundFormula, Connective, Constant, CompoundTerm +from ...syntax import Atom, CompoundFormula, CompoundTerm, Connective, Constant from ...syntax.builtins import BuiltinPredicateSymbol - from .errors import TransformationError diff --git a/src/tarski/syntax/transform/substitutions.py b/src/tarski/syntax/transform/substitutions.py index 39bdcec4..31b2e5cc 100644 --- a/src/tarski/syntax/transform/substitutions.py +++ b/src/tarski/syntax/transform/substitutions.py @@ -1,9 +1,6 @@ - import itertools from typing import List -from multipledispatch import dispatch # type: ignore - from ..symrefs import symref from ..terms import 
Variable from ..walker import FOLWalker @@ -15,8 +12,7 @@ def __init__(self, substitution): super().__init__(raise_on_undefined=False) self.substitution = substitution - @dispatch(object) - def visit(self, node): # pylint: disable-msg=E0102 + def visit(self, node): x = self.substitution.get(symref(node)) return node if x is None else x diff --git a/src/tarski/syntax/util.py b/src/tarski/syntax/util.py index af886c80..551c2f66 100644 --- a/src/tarski/syntax/util.py +++ b/src/tarski/syntax/util.py @@ -1,4 +1,3 @@ - import itertools diff --git a/src/tarski/syntax/visitors.py b/src/tarski/syntax/visitors.py index e35cb02b..090f6fb4 100644 --- a/src/tarski/syntax/visitors.py +++ b/src/tarski/syntax/visitors.py @@ -1,5 +1,5 @@ - -from tarski.syntax import symref, QuantifiedFormula, CompoundTerm, Variable, CompoundFormula, Atom +from tarski.syntax import (Atom, CompoundFormula, CompoundTerm, + QuantifiedFormula, Variable, symref) from tarski.syntax.formulas import is_eq_atom @@ -92,4 +92,3 @@ def visit(self, phi): elif isinstance(phi, Atom): if is_eq_atom(phi): self.atoms.add(symref(phi)) - return diff --git a/src/tarski/syntax/walker.py b/src/tarski/syntax/walker.py index a1fae2ac..f5023708 100644 --- a/src/tarski/syntax/walker.py +++ b/src/tarski/syntax/walker.py @@ -27,16 +27,10 @@ def __str__(self): class FOLWalker: - """ This is an experimental implementation of a visitor pattern based on single-dispatch. - At the moment we're using the "multipledispatch" package to implement single-argument dispatching. - It's far from perfect; it requires that the subclass declares the following "default" method: - - >>> @dispatch(object) - >>> def visit(self, node): # pylint: disable-msg=E0102 - >>> return self.default_handler(node) - - Whenever we move to support Python 3.8+, we could directly use: - https://docs.python.org/3/library/functools.html#functools.singledispatchmethod + """ + This is an experimental implementation of a visitor pattern based on single-dispatch. 
+ To use it, you need to subclass it and "overload" the `visit` function using the + `functools.singledispatchmethod` decorator, as it is done, for instance, in the class AllSymbolWalker. """ def __init__(self, raise_on_undefined=False): self.default_handler = self._raise if raise_on_undefined else self._donothing @@ -58,8 +52,10 @@ def run(self, expression, inplace=True): def visit_expression(self, node, inplace=True): # pylint: disable=import-outside-toplevel # Avoiding circular references - from .formulas import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction - from .terms import Constant, Variable, CompoundTerm, IfThenElse # pylint: disable=import-outside-toplevel + from .formulas import (Atom, CompoundFormula, Contradiction, + QuantifiedFormula, Tautology) + from .terms import ( # pylint: disable=import-outside-toplevel + CompoundTerm, Constant, IfThenElse, Variable) node = node if inplace else copy.deepcopy(node) if isinstance(node, (Variable, Constant, Contradiction, Tautology)): diff --git a/src/tarski/theories.py b/src/tarski/theories.py index 0c53d95b..faa0ace0 100644 --- a/src/tarski/theories.py +++ b/src/tarski/theories.py @@ -1,14 +1,15 @@ """ Management of the theories (e.g. equality, etc.) associated to the FO languages """ from enum import Enum -from typing import Union, List, Optional +from typing import List, Optional, Union from tarski.errors import DuplicateTheoryDefinition -from .syntax.sorts import attach_arithmetic_sorts, build_the_bools + +from . import errors as err from .fol import FirstOrderLanguage -from .syntax import builtins, Term -from .syntax.factory import create_atom, create_arithmetic_term +from .syntax import Term, builtins +from .syntax.factory import create_arithmetic_term, create_atom from .syntax.ops import cast_to_closest_common_numeric_ancestor -from . 
import errors as err +from .syntax.sorts import attach_arithmetic_sorts, build_the_bools class Theory(Enum): diff --git a/src/tarski/util.py b/src/tarski/util.py index d3642548..293b1810 100644 --- a/src/tarski/util.py +++ b/src/tarski/util.py @@ -29,7 +29,7 @@ def dump(self): return [str(o) for o in self.data.keys()] def __str__(self): - return ','.join('{}: {}'.format(idx, o) for o, idx in self.data.items()) + return ','.join(f'{idx}: {o}' for o, idx in self.data.items()) __repr__ = __str__ diff --git a/src/tarski/utils/__init__.py b/src/tarski/utils/__init__.py index fe120bde..2dfe5a1c 100644 --- a/src/tarski/utils/__init__.py +++ b/src/tarski/utils/__init__.py @@ -1,2 +1,3 @@ - from .helpers import parse_model + +__all__ = ['parse_model'] diff --git a/src/tarski/utils/algorithms.py b/src/tarski/utils/algorithms.py index 3942237a..c9e0cd05 100644 --- a/src/tarski/utils/algorithms.py +++ b/src/tarski/utils/algorithms.py @@ -1,11 +1,11 @@ import sys -from collections import deque, defaultdict +from collections import defaultdict, deque def transitive_closure(elements): closure = set(elements) while True: - closure_until_now = closure | set((x, w) for x, y in closure for q, w in closure if q == y) + closure_until_now = closure | {(x, w) for x, y in closure for q, w in closure if q == y} if len(closure_until_now) == len(closure): break diff --git a/src/tarski/utils/command.py b/src/tarski/utils/command.py index 5956535a..c5e2a58f 100644 --- a/src/tarski/utils/command.py +++ b/src/tarski/utils/command.py @@ -1,12 +1,7 @@ import errno import logging -import subprocess -from contextlib import contextmanager -import ctypes -import io import os -import sys -import tempfile +import subprocess def count_file_lines(filename): # Might be a bit faster with a call to "wc -l" @@ -42,9 +37,9 @@ def execute(command, **kwargs): msg = 'Executing "{}" on directory "{}"'.format(' '.join(command), cwd) if stdout: - msg += '. 
Standard output redirected to "{}"'.format(stdout.name) + msg += f'. Standard output redirected to "{stdout.name}"' if stderr: - msg += '. Standard error redirected to "{}"'.format(stderr.name) + msg += f'. Standard error redirected to "{stderr.name}"' logging.debug(msg) retcode = subprocess.call(command, cwd=cwd, stdout=stdout, stderr=stderr) diff --git a/src/tarski/utils/hashing.py b/src/tarski/utils/hashing.py index 223c85eb..af3c65be 100644 --- a/src/tarski/utils/hashing.py +++ b/src/tarski/utils/hashing.py @@ -1,4 +1,3 @@ - import hashlib import sys diff --git a/src/tarski/utils/resources.py b/src/tarski/utils/resources.py index d205eac0..a9867eef 100644 --- a/src/tarski/utils/resources.py +++ b/src/tarski/utils/resources.py @@ -39,7 +39,7 @@ def __str__(self): current_in_mb = current / (1024 * 1024) rss_in_mb = (current - self.start_mem) / (1024 * 1024) - return "[%.2fs CPU, %.2fs wall-clock, diff: %.2fMB, curr: %.2fMB]" % ( + return "[{:.2f}s CPU, {:.2f}s wall-clock, diff: {:.2f}MB, curr: {:.2f}MB]".format( self._clock() - self.start_clock, time.time() - self.start_time, rss_in_mb, current_in_mb) diff --git a/src/tarski/utils/serialization.py b/src/tarski/utils/serialization.py index 4fef28db..818f7d3a 100644 --- a/src/tarski/utils/serialization.py +++ b/src/tarski/utils/serialization.py @@ -1,5 +1,3 @@ - - def serialize_atom(atom): """ Return a comma-separated serialization of a given atom, e.g. from atom "on(a,b)", it will return "on,a,b". 
""" if not atom.subterms: diff --git a/src/tarski/version.py b/src/tarski/version.py index 4720c1b3..6d389260 100644 --- a/src/tarski/version.py +++ b/src/tarski/version.py @@ -1,3 +1,2 @@ - __version_info__ = (0, 7, 0) __version__ = '.'.join(map(str, __version_info__)) diff --git a/tests/analysis/test_csp.py b/tests/analysis/test_csp.py index ccf31833..0e61a219 100644 --- a/tests/analysis/test_csp.py +++ b/tests/analysis/test_csp.py @@ -1,7 +1,8 @@ """ Tests for the CSP analysis module """ -from tarski.analysis.csp import compute_schema_constraint_hypergraph, check_hypergraph_acyclicity +from tarski.analysis.csp import (check_hypergraph_acyclicity, + compute_schema_constraint_hypergraph) from tests.io.common import parse_benchmark_instance diff --git a/tests/benchmarks/test_benchmarks.py b/tests/benchmarks/test_benchmarks.py index cd224234..76c002d5 100644 --- a/tests/benchmarks/test_benchmarks.py +++ b/tests/benchmarks/test_benchmarks.py @@ -1,4 +1,5 @@ -from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem) from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.syntax import is_and diff --git a/tests/common/benchmarks.py b/tests/common/benchmarks.py index 2d1a9e92..6e50e008 100644 --- a/tests/common/benchmarks.py +++ b/tests/common/benchmarks.py @@ -1,5 +1,3 @@ - - def get_lenient_benchmarks(): """ Return a list of IPC domains that require non-strict PDDL parsing because of missing requirement flags or similar minor bugs, which would result in a parsing error. 
""" diff --git a/tests/common/blocksworld.py b/tests/common/blocksworld.py index b5341fda..8036b2ae 100644 --- a/tests/common/blocksworld.py +++ b/tests/common/blocksworld.py @@ -17,7 +17,7 @@ def generate_bw_loc_and_clear(num_blocks): # Table and blocks lang.constant('table', lang.Object) lang.constant('hand', lang.Object) - _ = [lang.constant('b{}'.format(k), lang.Object) for k in range(1, num_blocks + 1)] + _ = [lang.constant(f'b{k}', lang.Object) for k in range(1, num_blocks + 1)] return lang @@ -27,7 +27,7 @@ def create_4blocks_task(): loc = bw.get_function('loc') clear = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) table = bw.get_constant('table') hand = bw.get_constant('hand') diff --git a/tests/common/gridworld.py b/tests/common/gridworld.py index b7933f0f..71bb416d 100644 --- a/tests/common/gridworld.py +++ b/tests/common/gridworld.py @@ -1,7 +1,6 @@ - from tarski.fstrips import create_fstrips_problem, language -from tarski.theories import Theory from tarski.syntax import Tautology, land +from tarski.theories import Theory def generate_small_gridworld(): diff --git a/tests/common/gripper.py b/tests/common/gripper.py index 5a687d9c..e81853b7 100644 --- a/tests/common/gripper.py +++ b/tests/common/gripper.py @@ -1,4 +1,3 @@ - import tarski.model from tarski import fstrips as fs from tarski.syntax import land @@ -46,7 +45,7 @@ def create_sample_problem(): problem.init = init problem.goal = land(at(ball1, roomb), at(ball2, roomb), at(ball3, roomb), at(ball4, roomb)) - from_, to, o, r, g = [lang.variable(x, lang.Object) for x in ["from", "to", "o", "r", "g"]] + from_, to, o, r, g = (lang.variable(x, lang.Object) for x in ["from", "to", "o", "r", "g"]) problem.action("move", [from_, to], precondition=land(from_ != to, room(from_), room(to), at_robby(from_), flat=True), diff --git a/tests/common/numeric.py b/tests/common/numeric.py index 
0c7a6449..5b38393b 100644 --- a/tests/common/numeric.py +++ b/tests/common/numeric.py @@ -17,7 +17,7 @@ def generate_numeric_instance(): # Particles for k in (1, 2, 3, 4): - lang.constant('p{}'.format(k), particle) + lang.constant(f'p{k}', particle) return lang @@ -33,7 +33,7 @@ def generate_billiards_instance(): # The stuff lang.balls = [] for k in (1, 2): - bk = lang.constant('ball_{}'.format(k), ball) + bk = lang.constant(f'ball_{k}', ball) lang.balls.append(bk) lang.dimensions = [] diff --git a/tests/common/parcprinter.py b/tests/common/parcprinter.py index 3aa6105c..e5140ff7 100644 --- a/tests/common/parcprinter.py +++ b/tests/common/parcprinter.py @@ -3,10 +3,10 @@ """ import tarski as tsk import tarski.model -from tarski.theories import Theory from tarski import fstrips as fs -from tarski.syntax import top, land from tarski.evaluators.simple import evaluate +from tarski.syntax import land, top +from tarski.theories import Theory def create_small_language(): diff --git a/tests/common/simple.py b/tests/common/simple.py index 9f561f7b..1a5330c0 100644 --- a/tests/common/simple.py +++ b/tests/common/simple.py @@ -1,4 +1,3 @@ - import tarski.model from tarski import fstrips as fs from tarski.syntax import land, neg diff --git a/tests/common/spider.py b/tests/common/spider.py index c5f9c350..f2826abd 100644 --- a/tests/common/spider.py +++ b/tests/common/spider.py @@ -1,4 +1,3 @@ - from tarski.fstrips import fstrips from tarski.theories import Theory diff --git a/tests/dl/test_concepts.py b/tests/dl/test_concepts.py index aeda5de9..67293f2f 100644 --- a/tests/dl/test_concepts.py +++ b/tests/dl/test_concepts.py @@ -4,8 +4,10 @@ import pytest import tarski.benchmarks.blocksworld -from tarski.dl import SyntacticFactory, PrimitiveRole, PrimitiveConcept, NominalConcept, StarRole, InverseRole, \ - ArityDLMismatch +from tarski.dl import (ArityDLMismatch, InverseRole, NominalConcept, + PrimitiveConcept, PrimitiveRole, StarRole, + SyntacticFactory) + from ..common import 
blocksworld diff --git a/tests/fol/test_interpretations.py b/tests/fol/test_interpretations.py index 96c781d1..86b282ba 100644 --- a/tests/fol/test_interpretations.py +++ b/tests/fol/test_interpretations.py @@ -1,21 +1,19 @@ +import pytest import tarski import tarski.benchmarks.blocksworld import tarski.model +from tarski import errors, modules +from tarski.evaluators.simple import evaluate from tarski.fstrips import language from tarski.model import Model -from tarski import errors - -from ..common import numeric -from tarski.evaluators.simple import evaluate from tarski.syntax import Constant, ite, symref from tarski.theories import Theory -from tarski.modules import import_scipy_special -import pytest +from ..common import numeric try: - sci = import_scipy_special() + sp = modules.scipy_special except ImportError: pytest.skip('Please install the "arithmetic" extra to run the full suite of tests', allow_module_level=True) @@ -138,6 +136,7 @@ def test_special_function_abs(): def test_special_function_pow(): import numpy as np + from tarski.syntax.arithmetic import pow lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -149,6 +148,7 @@ def test_special_function_pow(): def test_special_function_sin(): import numpy as np + from tarski.syntax.arithmetic.special import sin lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -160,6 +160,7 @@ def test_special_function_sin(): def test_special_function_sqrt(): import numpy as np + from tarski.syntax.arithmetic import sqrt lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -171,6 +172,7 @@ def test_special_function_sqrt(): def test_special_function_cos(): import numpy as np + from tarski.syntax.arithmetic.special import cos lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -182,6 +184,7 @@ def test_special_function_cos(): def 
test_special_function_tan(): import numpy as np + from tarski.syntax.arithmetic.special import tan lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -193,6 +196,7 @@ def test_special_function_tan(): def test_special_function_atan(): import numpy as np + from tarski.syntax.arithmetic.special import atan lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -204,6 +208,7 @@ def test_special_function_atan(): def test_special_function_exp(): import numpy as np + from tarski.syntax.arithmetic.special import exp lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -215,6 +220,7 @@ def test_special_function_exp(): def test_special_function_log(): import numpy as np + from tarski.syntax.arithmetic.special import log lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -231,7 +237,7 @@ def test_special_function_erf(): model.evaluator = evaluate reals = lang.Real x = lang.constant(0.5, reals) - assert model[erf(x)].symbol == sci.erf(0.5) + assert model[erf(x)].symbol == sp.erf(0.5) def test_special_function_erfc(): @@ -241,11 +247,12 @@ def test_special_function_erfc(): model.evaluator = evaluate reals = lang.Real x = lang.constant(0.5, reals) - assert model[erfc(x)].symbol == sci.erfc(0.5) + assert model[erfc(x)].symbol == sp.erfc(0.5) def test_special_function_sgn(): import numpy as np + from tarski.syntax.arithmetic.special import sgn lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -257,6 +264,7 @@ def test_special_function_sgn(): def test_random_function_normal(): import numpy as np + from tarski.syntax.arithmetic.random import normal np.random.seed(1234) # for repeatability lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL, Theory.RANDOM]) @@ -272,6 +280,7 @@ def test_random_function_normal(): def 
test_random_function_gamma(): import numpy as np + from tarski.syntax.arithmetic.random import gamma np.random.seed(1234) # for repeatability lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL, Theory.RANDOM]) @@ -287,6 +296,7 @@ def test_random_function_gamma(): def test_arcsin(): import numpy as np + from tarski.syntax.arithmetic.special import asin lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) diff --git a/tests/fol/test_sorts.py b/tests/fol/test_sorts.py index 94d498ad..4fe1fa24 100644 --- a/tests/fol/test_sorts.py +++ b/tests/fol/test_sorts.py @@ -6,7 +6,8 @@ from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.syntax import symref from tarski.syntax.ops import compute_sort_id_assignment -from tarski.syntax.sorts import parent, ancestors, compute_signature_bindings, compute_direct_sort_map +from tarski.syntax.sorts import (ancestors, compute_direct_sort_map, + compute_signature_bindings, parent) from tarski.theories import Theory diff --git a/tests/fol/test_syntactic_analysis.py b/tests/fol/test_syntactic_analysis.py index 2fa54813..aaec7bb9 100644 --- a/tests/fol/test_syntactic_analysis.py +++ b/tests/fol/test_syntactic_analysis.py @@ -1,6 +1,7 @@ - -from tarski.syntax import neg, land, lor, exists, symref, forall, Variable, Constant, Atom -from tarski.syntax.ops import free_variables, flatten, collect_unique_nodes, all_variables +from tarski.syntax import (Atom, Constant, Variable, exists, forall, land, lor, + neg, symref) +from tarski.syntax.ops import (all_variables, collect_unique_nodes, flatten, + free_variables) from tests.common import tarskiworld from tests.common.blocksworld import generate_bw_loc_and_clear diff --git a/tests/fol/test_syntax.py b/tests/fol/test_syntax.py index 0601ae0d..ff8d2fa2 100755 --- a/tests/fol/test_syntax.py +++ b/tests/fol/test_syntax.py @@ -1,18 +1,19 @@ - import copy from collections import defaultdict import pytest 
-from tarski import theories, Term, Constant -from tarski.benchmarks.blocksworld import generate_strips_bw_language -from tarski.fstrips import fstrips -from tarski.syntax import symref, CompoundFormula, Atom, ite, AggregateCompoundTerm, CompoundTerm, lor, Tautology, \ - Contradiction, land, top, bot -from tarski.theories import Theory +from tarski import Constant, Term from tarski import errors as err from tarski import fstrips as fs +from tarski import theories +from tarski.benchmarks.blocksworld import generate_strips_bw_language +from tarski.fstrips import fstrips +from tarski.syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + CompoundTerm, Contradiction, Tautology, bot, ite, + land, lor, symref, top) from tarski.syntax.algebra import Matrix +from tarski.theories import Theory from ..common import numeric diff --git a/tests/fol/test_variable_bindings.py b/tests/fol/test_variable_bindings.py index 0ead741d..f96140e0 100644 --- a/tests/fol/test_variable_bindings.py +++ b/tests/fol/test_variable_bindings.py @@ -1,5 +1,3 @@ - - def test_object_type(): pass # TODO diff --git a/tests/fstrips/contingent/localize.py b/tests/fstrips/contingent/localize.py index 71e29fa6..bd80ed8d 100644 --- a/tests/fstrips/contingent/localize.py +++ b/tests/fstrips/contingent/localize.py @@ -2,7 +2,6 @@ from tarski import fstrips as fs from tarski.fstrips import contingent from tarski.syntax import * - from tests.common.grid_navigation import generate_single_agent_language diff --git a/tests/fstrips/contingent/test_sensors.py b/tests/fstrips/contingent/test_sensors.py index 5f213af5..346b1a37 100644 --- a/tests/fstrips/contingent/test_sensors.py +++ b/tests/fstrips/contingent/test_sensors.py @@ -2,7 +2,6 @@ from tarski.fstrips import contingent from tarski.syntax import * - from tests.common import grid_navigation from tests.fstrips.contingent import localize diff --git a/tests/fstrips/hybrid/tasks.py b/tests/fstrips/hybrid/tasks.py index c897d159..fd229706 100644 --- 
a/tests/fstrips/hybrid/tasks.py +++ b/tests/fstrips/hybrid/tasks.py @@ -2,8 +2,8 @@ from tarski.fstrips import hybrid from tarski.syntax import * from tarski.syntax.arithmetic import summation - -from tests.common.numeric import generate_numeric_instance, generate_billiards_instance +from tests.common.numeric import (generate_billiards_instance, + generate_numeric_instance) def create_particles_world(): @@ -11,7 +11,7 @@ def create_particles_world(): particles = generate_numeric_instance() task.language = generate_numeric_instance() - x, y, f = [particles.get_function(name) for name in ['x', 'y', 'f']] + x, y, f = (particles.get_function(name) for name in ['x', 'y', 'f']) _ = [particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']] p_var = Variable('p', task.language.get_sort('particle')) @@ -26,7 +26,7 @@ def create_billiards_world(): task = hybrid.Problem() lang = generate_billiards_instance() task.language = lang - m, F, a, v, p = [lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']] + m, F, a, v, p = (lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']) b = Variable('b', lang.get_sort('ball')) d = Variable('d', lang.get_sort('dimension')) diff --git a/tests/fstrips/hybrid/test_differential.py b/tests/fstrips/hybrid/test_differential.py index c89356b1..1c230371 100644 --- a/tests/fstrips/hybrid/test_differential.py +++ b/tests/fstrips/hybrid/test_differential.py @@ -1,14 +1,13 @@ from tarski.fstrips import hybrid from tarski.syntax import * - from tests.common.numeric import generate_numeric_instance def test_diff_constraint_creation(): particles = generate_numeric_instance() - x, y, f = [particles.get_function(name) for name in ['x', 'y', 'f']] - p1, p2, p3, p4 = [particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']] + x, y, f = (particles.get_function(name) for name in ['x', 'y', 'f']) + p1, p2, p3, p4 = (particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']) constraint = hybrid.DifferentialConstraint(particles, 
'test', [], top, x(p1), f(p1) * 2.0) assert isinstance(constraint, hybrid.DifferentialConstraint) diff --git a/tests/fstrips/hybrid/test_reactions.py b/tests/fstrips/hybrid/test_reactions.py index 9b036d22..32a5c8b0 100644 --- a/tests/fstrips/hybrid/test_reactions.py +++ b/tests/fstrips/hybrid/test_reactions.py @@ -1,14 +1,13 @@ +import tests.common.numeric as numeric from tarski import fstrips as fs from tarski.fstrips import hybrid from tarski.syntax import * -import tests.common.numeric as numeric - def test_reaction_creation(): from tarski.syntax.arithmetic import summation lang = numeric.generate_billiards_instance() - m, F, a, v, p = [lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']] + m, F, a, v, p = (lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']) b = Variable('b', lang.get_sort('ball')) d = Variable('d', lang.get_sort('dimension')) diff --git a/tests/fstrips/test_fstrips_operations.py b/tests/fstrips/test_fstrips_operations.py index 4ab428bb..f445bbfb 100644 --- a/tests/fstrips/test_fstrips_operations.py +++ b/tests/fstrips/test_fstrips_operations.py @@ -1,10 +1,10 @@ from tarski.benchmarks.blocksworld import generate_fstrips_bw_language -from tarski.fstrips import create_fstrips_problem, AddEffect +from tarski.fstrips import AddEffect, create_fstrips_problem from tarski.fstrips.ops import collect_all_symbols from tarski.grounding.ops import approximate_symbol_fluency from tarski.syntax import top -from ..common import parcprinter, gripper +from ..common import gripper, parcprinter def test_symbol_classification_in_parcprinter(): diff --git a/tests/fstrips/test_problem_grounding.py b/tests/fstrips/test_problem_grounding.py index 4f432d86..ac5a9c76 100644 --- a/tests/fstrips/test_problem_grounding.py +++ b/tests/fstrips/test_problem_grounding.py @@ -1,15 +1,16 @@ from collections import OrderedDict -from tarski.fstrips import DelEffect, UniversalEffect, AddEffect +from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem 
+from tarski.fstrips import AddEffect, DelEffect, UniversalEffect from tarski.fstrips.action import PlainOperator from tarski.fstrips.representation import is_ground from tarski.grounding import ProblemGrounding -from tarski.grounding.lp_grounding import ground_problem_schemas_into_plain_operators +from tarski.grounding.lp_grounding import \ + ground_problem_schemas_into_plain_operators from tarski.syntax import symref -from tarski.syntax.transform.action_grounding import ground_schema_into_plain_operator, \ - ground_schema_into_plain_operator_from_grounding -from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem - +from tarski.syntax.transform.action_grounding import ( + ground_schema_into_plain_operator, + ground_schema_into_plain_operator_from_grounding) from tests.common import blocksworld @@ -27,7 +28,7 @@ def test_action_grounding_bw(): b1, b2, b3, clear, on, ontable, handempty, holding = \ problem.language.get('b1', 'b2', 'b3', 'clear', 'on', 'ontable', 'handempty', 'holding') unstack = problem.get_action("unstack") - x1, x2 = [symref(x) for x in unstack.parameters] # Unstack has two parameters + x1, x2 = (symref(x) for x in unstack.parameters) # Unstack has two parameters ground = ground_schema_into_plain_operator(unstack, {x1: b1, x2: b2}) # i.e. 
the operator unstack(b1, b2) assert isinstance(ground, PlainOperator) and \ str(ground.precondition) == '(on(b1,b2) and clear(b1) and handempty())' diff --git a/tests/fstrips/test_representation.py b/tests/fstrips/test_representation.py index bbfeaabd..90eabec3 100644 --- a/tests/fstrips/test_representation.py +++ b/tests/fstrips/test_representation.py @@ -1,15 +1,22 @@ import tarski.benchmarks.blocksworld +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_fstrips_bw_language, + generate_strips_blocksworld_problem) from tarski.benchmarks.counters import generate_fstrips_counters_problem -from tarski.fstrips.representation import collect_effect_free_parameters, project_away_effect_free_variables, \ - collect_effect_free_variables, project_away_effect_free_variables_from_problem, is_typed_problem, \ - identify_cost_related_functions, compute_delete_free_relaxation, is_delete_free, is_strips_problem, \ - is_conjunction_of_positive_atoms, is_strips_effect_set, compile_away_formula_negated_literals, \ - compile_action_negated_preconditions_away, compile_negated_preconditions_away, compute_complementary_atoms -from tarski.syntax import exists, land, neg, symref, substitute_expression, forall -from tarski.fstrips import representation as rep, AddEffect, DelEffect +from tarski.fstrips import AddEffect, DelEffect +from tarski.fstrips import representation as rep +from tarski.fstrips.representation import ( + collect_effect_free_parameters, collect_effect_free_variables, + compile_action_negated_preconditions_away, + compile_away_formula_negated_literals, compile_negated_preconditions_away, + compute_complementary_atoms, compute_delete_free_relaxation, + identify_cost_related_functions, is_conjunction_of_positive_atoms, + is_delete_free, is_strips_effect_set, is_strips_problem, is_typed_problem, + project_away_effect_free_variables, + project_away_effect_free_variables_from_problem) +from tarski.syntax import (exists, forall, land, 
neg, substitute_expression, + symref) from tarski.syntax.ops import flatten -from tarski.benchmarks.blocksworld import generate_fstrips_bw_language, generate_fstrips_blocksworld_problem, \ - generate_strips_blocksworld_problem from tests.io.common import parse_benchmark_instance @@ -284,7 +291,7 @@ def test_compute_complementary_atoms(): def test_simple_expression_substitutions(): lang = tarski.benchmarks.blocksworld.generate_strips_bw_language(nblocks=2) - clear, b1, b2 = [lang.get(name) for name in ('clear', 'b1', 'b2')] + clear, b1, b2 = (lang.get(name) for name in ('clear', 'b1', 'b2')) x, y = lang.variable('x', 'object'), lang.variable('y', 'object') formula = clear(x) diff --git a/tests/fstrips/test_simplify.py b/tests/fstrips/test_simplify.py index cb428ebc..e6a65a31 100644 --- a/tests/fstrips/test_simplify.py +++ b/tests/fstrips/test_simplify.py @@ -2,8 +2,9 @@ from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.fstrips import UniversalEffect from tarski.fstrips.manipulation import Simplify -from tarski.fstrips.manipulation.simplify import simplify_existential_quantification -from tarski.syntax import symref, land, lor, neg, bot, top, forall, exists +from tarski.fstrips.manipulation.simplify import \ + simplify_existential_quantification +from tarski.syntax import bot, exists, forall, land, lor, neg, symref, top def test_simplifier(): @@ -11,7 +12,7 @@ def test_simplifier(): lang = problem.language value, max_int, counter, val_t, c1 = lang.get('value', 'max_int', 'counter', 'val', 'c1') x = lang.variable('x', counter) - two, three, six = [lang.constant(c, val_t) for c in (2, 3, 6)] + two, three, six = (lang.constant(c, val_t) for c in (2, 3, 6)) s = Simplify(problem, problem.init) assert symref(s.simplify_expression(x)) == symref(x) @@ -76,7 +77,7 @@ def test_simplification_pruning(): problem = generate_fstrips_counters_problem(ncounters=3) lang = problem.language value, max_int, counter, val_t, c1 = lang.get('value', 
'max_int', 'counter', 'val', 'c1') - three, six = [lang.constant(c, val_t) for c in (3, 6)] + three, six = (lang.constant(c, val_t) for c in (3, 6)) s = Simplify(problem, problem.init) @@ -98,7 +99,7 @@ def test_simplification_of_ex_quantification(): value, max_int, counter, val_t, c1 = lang.get('value', 'max_int', 'counter', 'val', 'c1') x = lang.variable('x', counter) z = lang.variable('z', counter) - two, three, six = [lang.constant(c, val_t) for c in (2, 3, 6)] + two, three, six = (lang.constant(c, val_t) for c in (2, 3, 6)) phi = exists(z, land(x == z, top, value(z) < six)) assert simplify_existential_quantification(phi, inplace=False) == land(top, value(x) < six), \ diff --git a/tests/fstrips/test_symbol_classification.py b/tests/fstrips/test_symbol_classification.py index df618105..6f55370b 100644 --- a/tests/fstrips/test_symbol_classification.py +++ b/tests/fstrips/test_symbol_classification.py @@ -1,9 +1,10 @@ import pytest + from tarski.grounding.ops import approximate_symbol_fluency from tarski.syntax.util import get_symbols -from ..io.common import reader, collect_strips_benchmarks, collect_fstrips_benchmarks - +from ..io.common import (collect_fstrips_benchmarks, collect_strips_benchmarks, + reader) SAMPLE_STRIPS_INSTANCES = [ "settlers-sat18-adl:p01.pddl", diff --git a/tests/fstrips/test_walker.py b/tests/fstrips/test_walker.py index 9fdc95da..ba10abf4 100644 --- a/tests/fstrips/test_walker.py +++ b/tests/fstrips/test_walker.py @@ -1,6 +1,5 @@ from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem - # def test_fstrips_problem_walker(): # problem = generate_fstrips_blocksworld_problem( # nblocks=2, diff --git a/tests/grounding/test_lp_grounding.py b/tests/grounding/test_lp_grounding.py index f5849e25..7d6f3d0f 100644 --- a/tests/grounding/test_lp_grounding.py +++ b/tests/grounding/test_lp_grounding.py @@ -1,4 +1,5 @@ import shutil + import pytest from tarski.grounding import LPGroundingStrategy, NaiveGroundingStrategy @@ -7,10 
+8,11 @@ from tarski.reachability import create_reachability_lp from tarski.syntax import neg from tests.common.benchmarks import get_lenient_benchmarks - from tests.common.gripper import create_sample_problem from tests.common.simple import create_simple_problem -from ..io.common import reader, collect_strips_benchmarks, parse_benchmark_instance + +from ..io.common import (collect_strips_benchmarks, parse_benchmark_instance, + reader) if shutil.which("gringo") is None: pytest.skip('Install the Clingo ASP solver and put the "gringo" binary on your PATH in order to test ASP-based ' diff --git a/tests/grounding/test_naive_grounding.py b/tests/grounding/test_naive_grounding.py index 7e1fecef..531dead5 100644 --- a/tests/grounding/test_naive_grounding.py +++ b/tests/grounding/test_naive_grounding.py @@ -1,17 +1,21 @@ -from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem -from tarski.grounding import ProblemGrounding, NaiveGroundingStrategy, create_all_possible_state_variables +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem) +from tarski.grounding import (NaiveGroundingStrategy, ProblemGrounding, + create_all_possible_state_variables) from tarski.grounding.naive import instantiation -from tarski.util import SymbolIndex -from tarski.syntax import create_substitution -from tarski.grounding.naive.sensors import SensorGrounder from tarski.grounding.naive.constraints import ConstraintGrounder -from tarski.grounding.naive.diff_constraints import DifferentialConstraintGrounder +from tarski.grounding.naive.diff_constraints import \ + DifferentialConstraintGrounder from tarski.grounding.naive.reactions import ReactionGrounder +from tarski.grounding.naive.sensors import SensorGrounder +from tarski.syntax import create_substitution +from tarski.util import SymbolIndex +from tests.common import parcprinter +from tests.common.blocksworld import 
create_4blocks_task from ..fstrips.contingent import localize -from ..fstrips.hybrid.tasks import create_particles_world, create_billiards_world -from tests.common.blocksworld import create_4blocks_task -from tests.common import parcprinter +from ..fstrips.hybrid.tasks import (create_billiards_world, + create_particles_world) def create_small_bw_with_index(): diff --git a/tests/io/test_builtin_domains_parsing.py b/tests/io/test_builtin_domains_parsing.py index ecc73d71..69b677eb 100644 --- a/tests/io/test_builtin_domains_parsing.py +++ b/tests/io/test_builtin_domains_parsing.py @@ -2,7 +2,6 @@ from .common import reader - _this_dir = os.path.dirname(os.path.realpath(__file__)) _data_dir = os.path.join(_this_dir, "..", "data", "pddl") diff --git a/tests/io/test_fd_sas_writer.py b/tests/io/test_fd_sas_writer.py index c2894839..8bea67de 100644 --- a/tests/io/test_fd_sas_writer.py +++ b/tests/io/test_fd_sas_writer.py @@ -1,11 +1,12 @@ import pytest import tarski as tsk -from tarski.theories import Theory -from tarski.syntax import land, symref from tarski.io.sas.fd import Writer +from tarski.syntax import land, symref +from tarski.theories import Theory from tarski.util import SymbolIndex + @pytest.mark.sas def test_gripper_instance(): """ diff --git a/tests/io/test_fstrips_full_domains_parsing.py b/tests/io/test_fstrips_full_domains_parsing.py index 784601fc..b9317344 100644 --- a/tests/io/test_fstrips_full_domains_parsing.py +++ b/tests/io/test_fstrips_full_domains_parsing.py @@ -1,7 +1,11 @@ -from tarski.fstrips.representation import is_unit_cost_problem, is_unit_cost_action, is_zero_cost_action, \ - is_constant_cost_action +from tarski.fstrips.representation import (is_constant_cost_action, + is_unit_cost_action, + is_unit_cost_problem, + is_zero_cost_action) from tests.common.benchmarks import get_lenient_benchmarks -from .common import reader, collect_strips_benchmarks, collect_fstrips_benchmarks, parse_benchmark_instance + +from .common import 
(collect_fstrips_benchmarks, collect_strips_benchmarks, + parse_benchmark_instance, reader) # Let's make sure we can correctly parse all benchmarks from the IPC competitions in 2008, 2011, 2014, 2018. # We have chosen optimal track benchmarks, which one would expect to be the smallest between optimal / satisficing diff --git a/tests/io/test_fstrips_parsing.py b/tests/io/test_fstrips_parsing.py index 5484b346..dace74f1 100644 --- a/tests/io/test_fstrips_parsing.py +++ b/tests/io/test_fstrips_parsing.py @@ -1,15 +1,14 @@ - import pytest -from tarski.errors import UndefinedSort, UndefinedPredicate + +from tarski.errors import UndefinedPredicate, UndefinedSort from tarski.fstrips import AddEffect, FunctionalEffect from tarski.fstrips.errors import InvalidEffectError -from tarski.io.fstrips import ParsingError, FstripsReader +from tarski.io.fstrips import FstripsReader, ParsingError from tarski.syntax import Atom, CompoundFormula, Tautology from tarski.syntax.util import get_symbols from tarski.theories import Theory - from tests.common.spider import generate_spider_language -from tests.io.common import reader, parse_benchmark_instance +from tests.io.common import parse_benchmark_instance, reader def get_rule(name): @@ -107,12 +106,12 @@ def test_domain_name_parsing(): # Test a few names expected to be valid: for domain_name in ["BLOCKS", "blocS-woRlD", "blocks_world"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" _ = r.parse_string(tag, get_rule("domain")) # And a few ones expected to be invalid for domain_name in ["BL#OCKS", "@mydomain", "2ndblocksworld", "blocks2.0"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" with pytest.raises(ParsingError): _ = r.parse_string(tag, get_rule("domain")) @@ -123,12 +122,12 @@ def test_formulas(): # Test a few names expected to be valid: for domain_name in ["BLOCKS", "blocS-woRlD", "blocks_world"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" 
_ = r.parse_string(tag, get_rule("domain")) # And a few ones expected to be invalid for domain_name in ["BL#OCKS", "@mydomain", "2ndblocksworld", "blocks2.0"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" with pytest.raises(ParsingError): _ = r.parse_string(tag, get_rule("domain")) @@ -246,7 +245,7 @@ def test_symbol_casing(): # PDDL predicate current-deal remains unaffected _ = problem.language.get_predicate("current-deal") - assert "to-deal" in set(x.symbol for x in get_symbols(problem.language, type_="predicate", include_builtin=False)) + assert "to-deal" in {x.symbol for x in get_symbols(problem.language, type_="predicate", include_builtin=False)} SPIDER_DEAL_CARD_ACTION = """ diff --git a/tests/io/test_fstrips_writer.py b/tests/io/test_fstrips_writer.py index 24287a50..e9c73809 100644 --- a/tests/io/test_fstrips_writer.py +++ b/tests/io/test_fstrips_writer.py @@ -1,18 +1,21 @@ import tempfile -from typing import Optional, List +from typing import List, Optional import tarski.fstrips as fs from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem -from tarski.benchmarks.counters import get_counters_elements, generate_fstrips_counters_problem -from tarski.fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect +from tarski.benchmarks.counters import (generate_fstrips_counters_problem, + get_counters_elements) +from tarski.fstrips import (AddEffect, DelEffect, FunctionalEffect, + UniversalEffect) from tarski.io import FstripsWriter from tarski.io._fstrips.common import get_requirements_string -from tarski.io.fstrips import print_effects, print_effect, print_objects, print_metric, print_formula, print_term -from tarski.syntax import forall, exists, Constant +from tarski.io.fstrips import (print_effect, print_effects, print_formula, + print_metric, print_objects, print_term) +from tarski.syntax import Constant, exists, forall from tarski.theories import Theory - from tests.common import parcprinter 
from tests.io.common import reader + from ..common.gridworld import generate_small_gridworld @@ -65,11 +68,11 @@ def test_effect_writing(): e2 = AddEffect(clear(b1)) e3 = DelEffect(clear(b1)) - s1, s2, s3 = [print_effect(e) for e in [e1, e2, e3]] + s1, s2, s3 = (print_effect(e) for e in [e1, e2, e3]) assert s1 == "(assign (loc b1) table)" assert s2 == "(clear b1)" assert s3 == "(not (clear b1))" - assert print_effects([e1, e2, e3]) == "(and\n {}\n {}\n {})".format(s1, s2, s3) + assert print_effects([e1, e2, e3]) == f"(and\n {s1}\n {s2}\n {s3})" e4 = UniversalEffect([block_var], [AddEffect(clear(block_var))]) s4 = print_effect(e4) diff --git a/tests/io/test_pddl_parsing.py b/tests/io/test_pddl_parsing.py index 11994f2d..2acdaa28 100644 --- a/tests/io/test_pddl_parsing.py +++ b/tests/io/test_pddl_parsing.py @@ -4,8 +4,9 @@ import tempfile import pytest -from tarski.io.pddl.lexer import PDDLlex + from tarski.io.pddl import Features +from tarski.io.pddl.lexer import PDDLlex from tarski.io.pddl.parser import PDDLparser, UnsupportedFeature from tarski.syntax.visitors import CollectEqualityAtoms @@ -150,7 +151,7 @@ def test_basic_constructs(): print("Predicates", len(instance.predicates)) print("Types", len(instance.types)) print("Constants", len(instance.constants)) - print("Actions: instantaneous: {} durative: {}".format(len(instance.actions), len(instance.durative))) + print(f"Actions: instantaneous: {len(instance.actions)} durative: {len(instance.durative)}") print("Derived predicates:", len(instance.derived)) print("Initial State literals", len(instance.init)) diff --git a/tests/io/test_rddl_parsing.py b/tests/io/test_rddl_parsing.py index 5fe183c1..a0aeb771 100644 --- a/tests/io/test_rddl_parsing.py +++ b/tests/io/test_rddl_parsing.py @@ -1,5 +1,5 @@ -from tarski.syntax import * from tarski.io import rddl +from tarski.syntax import * def test_language_init_mars_rovers(): diff --git a/tests/io/test_rddl_writer.py b/tests/io/test_rddl_writer.py index 
68f3cd20..17748db9 100644 --- a/tests/io/test_rddl_writer.py +++ b/tests/io/test_rddl_writer.py @@ -2,13 +2,13 @@ import tempfile import tarski -from tarski.theories import Theory -from tarski.syntax import * from tarski.io import rddl +from tarski.rddl import Task +from tarski.syntax import * from tarski.syntax.arithmetic import * -from tarski.syntax.arithmetic.special import * from tarski.syntax.arithmetic.random import * -from tarski.rddl import Task +from tarski.syntax.arithmetic.special import * +from tarski.theories import Theory def test_simple_rddl_model(): diff --git a/tests/ndl/test_temporal.py b/tests/ndl/test_temporal.py index c081fce5..31dae4a5 100644 --- a/tests/ndl/test_temporal.py +++ b/tests/ndl/test_temporal.py @@ -1,12 +1,12 @@ import pytest import tarski as tsk -from tarski.model import Model from tarski.evaluators.simple import evaluate -from tarski.syntax import forall, equiv, neg, land, exists -from tarski.theories import Theory +from tarski.model import Model from tarski.ndl import temporal -from tarski.ndl.temporal import TimedEffect, SetLiteralEffect +from tarski.ndl.temporal import SetLiteralEffect, TimedEffect +from tarski.syntax import equiv, exists, forall, land, neg +from tarski.theories import Theory def test_resource_lock_creation(): @@ -21,7 +21,7 @@ def test_resource_lock_creation(): L = tsk.language("mylang", theories=[Theory.EQUALITY, Theory.ARITHMETIC]) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')) int_t = L.Integer engaged = L.function('engaged', sensor_sort, int_t) @@ -63,7 +63,7 @@ def test_action_creation(): direction = L.function('direction', platform_t, int_t) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, 
sensor_sort) for name in ('camera', 'range', 'bearing')) engaged = L.function('engaged', sensor_sort, int_t) region_t = L.sort('region') @@ -105,7 +105,7 @@ def test_instance_creation(): direction = L.function('direction', platform_t, int_t) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')) engaged = L.function('engaged', sensor_sort, int_t) region_t = L.sort('region') diff --git a/tests/reachability/test_reachability_lp.py b/tests/reachability/test_reachability_lp.py index 5addd2a2..72ce363b 100644 --- a/tests/reachability/test_reachability_lp.py +++ b/tests/reachability/test_reachability_lp.py @@ -1,7 +1,8 @@ - -from tarski.reachability.asp import create_reachability_lp, LogicProgram, ReachabilityLPCompiler, LPAtom -from tarski.syntax import exists from tarski import fstrips as fs +from tarski.reachability.asp import (LogicProgram, LPAtom, + ReachabilityLPCompiler, + create_reachability_lp) +from tarski.syntax import exists from tests.io.common import parse_benchmark_instance from ..common.gripper import create_sample_problem @@ -16,7 +17,7 @@ def test_lp_compilation(): problem = create_sample_problem() lang = problem.language - x, y, z = [lang.variable(x, lang.Object) for x in ["x", "y", "z"]] + x, y, z = (lang.variable(x, lang.Object) for x in ["x", "y", "z"]) room, ball, at_robby, free, at, gripper, carry = lang.get( "room", "ball", "at-robby", "free", "at", "gripper", "carry") diff --git a/tests/sas/test_action.py b/tests/sas/test_action.py index c7bb7a27..6aea2b32 100644 --- a/tests/sas/test_action.py +++ b/tests/sas/test_action.py @@ -1,13 +1,15 @@ +from itertools import combinations, permutations + import pytest + import tarski as tsk -from tarski.theories import Theory -from tarski.syntax import land, symref -from tarski.sas import Schema, Action -from tarski.sas.temporal 
import TemporalAction -from tarski.sas.util import ground_temporal_action -from itertools import combinations, permutations import tarski.model from tarski.evaluators.simple import evaluate +from tarski.sas import Action, Schema +from tarski.sas.temporal import TemporalAction +from tarski.sas.util import ground_temporal_action +from tarski.syntax import land, symref +from tarski.theories import Theory @pytest.mark.sas diff --git a/tests/search/test_search_models.py b/tests/search/test_search_models.py index 975cb1d0..25eddf77 100644 --- a/tests/search/test_search_models.py +++ b/tests/search/test_search_models.py @@ -2,10 +2,12 @@ Tests for the Search module """ from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem -from tarski.grounding.lp_grounding import ground_problem_schemas_into_plain_operators -from tarski.search import GroundForwardSearchModel, BreadthFirstSearch +from tarski.grounding.lp_grounding import \ + ground_problem_schemas_into_plain_operators +from tarski.search import BreadthFirstSearch, GroundForwardSearchModel from tarski.search.model import progress -from tarski.syntax.transform.action_grounding import ground_schema_into_plain_operator_from_grounding +from tarski.syntax.transform.action_grounding import \ + ground_schema_into_plain_operator_from_grounding from tarski.utils import parse_model from tests.io.common import parse_benchmark_instance @@ -31,7 +33,7 @@ def test_forward_search_model(): moveright_op = ground_schema_into_plain_operator_from_grounding(move, ('rooma', 'roomb')) assert s1 == progress(s0, moveright_op) - successors = set(succ for op, succ in model.successors(s0)) + successors = {succ for op, succ in model.successors(s0)} assert s1 in successors # Let's test add-after-delete semantics are correctly enforced. 
The move(x, y) action in Gripper doesn't diff --git a/tests/test_utils.py b/tests/test_utils.py index 18659edd..08fab260 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,3 @@ - from tarski.utils import resources diff --git a/tests/transforms/test_syntax_transformations.py b/tests/transforms/test_syntax_transformations.py index 0f835ce8..981cb124 100644 --- a/tests/transforms/test_syntax_transformations.py +++ b/tests/transforms/test_syntax_transformations.py @@ -3,15 +3,16 @@ import tarski.benchmarks.blocksworld from tarski.fstrips.representation import is_quantifier_free from tarski.syntax import * -from tests.common import tarskiworld - -from tarski.syntax.transform.nnf import NNFTransformation +from tarski.syntax.transform import (CNFTransformation, + NegatedBuiltinAbsorption, + QuantifierElimination, + QuantifierEliminationMode, + remove_quantifiers) from tarski.syntax.transform.cnf import to_conjunctive_normal_form_clauses -from tarski.syntax.transform.prenex import to_prenex_negation_normal_form -from tarski.syntax.transform import CNFTransformation, QuantifierElimination, remove_quantifiers, \ - QuantifierEliminationMode -from tarski.syntax.transform import NegatedBuiltinAbsorption from tarski.syntax.transform.errors import TransformationError +from tarski.syntax.transform.nnf import NNFTransformation +from tarski.syntax.transform.prenex import to_prenex_negation_normal_form +from tests.common import tarskiworld def test_nnf_conjunction(): @@ -20,7 +21,7 @@ def test_nnf_conjunction(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') phi = neg(land(loc(b1) != loc(b2), loc(b3) != loc(b4))) @@ -36,7 +37,7 @@ def test_nnf_double_negation(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 
= [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') phi = neg(neg(loc(b1) == loc(b2))) @@ -50,7 +51,7 @@ def test_nnf_quantifier_flips(): bw = tarski.benchmarks.blocksworld.generate_fstrips_bw_language() block = bw.get_sort('block') loc = bw.get_function('loc') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) x = bw.variable('x', block) @@ -74,7 +75,7 @@ def test_nnf_lpl_page_321_antecedent(): def test_prenex_idempotency(): bw = tarski.benchmarks.blocksworld.generate_fstrips_bw_language() loc = bw.get_function('loc') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) phi = loc(b1) == b2 assert str(to_prenex_negation_normal_form(bw, phi, do_copy=True)) == str(phi) @@ -161,7 +162,7 @@ def test_builtin_negation_absorption(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') _ = bw.variable('x', block) diff --git a/tox.ini b/tox.ini index c765785e..3593b741 100644 --- a/tox.ini +++ b/tox.ini @@ -94,8 +94,6 @@ precision = 2 [flake8] max-line-length = 120 ignore = - # Let's deal with "unused imports" once we move to Python 3.7 and can do lazy importing - F401, # The preferred style now is as governed by W504, see https://www.flake8rules.com/rules/W503.html W503,