From 3118c91e2ce34df6393ee33bb79c7778ded63d0d Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 09:56:12 +0100 Subject: [PATCH 01/14] Bump up Python version specifiers --- .github/workflows/unit-tests.yml | 4 ++-- .readthedocs.yml | 2 +- docs/installation.md | 2 +- setup.py | 3 +-- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 13b8fb8f..f61fe4cc 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -27,8 +27,8 @@ jobs: # this way, failures are treated as a warning and don’t fail the whole workflow. This is sometimes referred to # as a "shadow CI job". # pypy is currently disabled, as it takes a long time to run (>20mins) -# python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.9'] - python-version: ['3.7', '3.8', '3.9', '3.10'] +# python-version: ['3.8', '3.9', '3.10', 'pypy-3.9'] + python-version: ['3.8', '3.9', '3.10'] experimental: [false] include: - os: ubuntu-latest diff --git a/.readthedocs.yml b/.readthedocs.yml index c97d251a..c5c6292e 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,6 +15,6 @@ formats: # Optionally set the version of Python and requirements required to build your docs python: - version: 3.7 + version: "3.9" install: - requirements: docs/requirements.txt diff --git a/docs/installation.md b/docs/installation.md index ca9a6445..8837c31c 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,7 +2,7 @@ # Installing Tarski ## Software Requirements -Tarski is mostly developed in Python, and requires a working Python>=3.6 installation. +Tarski is mostly developed in Python, and requires a working Python>=3.8 installation. We strongly recommend installing Tarski within a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html). The installation instructions below will install for you any additional diff --git a/setup.py b/setup.py index 5c5137dd..7fd6d9e3 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,6 @@ def main(): "License :: OSI Approved :: Apache Software License", 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', @@ -50,7 +49,7 @@ def main(): packages=find_packages('src'), # include all packages under src package_dir={'': 'src'}, # tell distutils packages are under src - python_requires='>=3.6', # supported Python ranges + python_requires='>=3.8', # supported Python ranges install_requires=[ # psutil not supported on Windows, we haven't tested in other platforms, but since it's not essential # to the functioning of Tarski, better be conservative here and install only on Linux. 
From 4cdc0f002b96af34529a4f7df0a638bb0ef5eb9d Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 10:00:10 +0100 Subject: [PATCH 02/14] Add explicit Python version check --- src/tarski/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/tarski/__init__.py b/src/tarski/__init__.py index f5947707..59ef90fe 100644 --- a/src/tarski/__init__.py +++ b/src/tarski/__init__.py @@ -1,5 +1,7 @@ import logging +import sys as sys + from .version import __version__, __version_info__ from .fol import FirstOrderLanguage from .theories import language @@ -10,5 +12,9 @@ logging.getLogger(__name__).addHandler(logging.NullHandler()) + +if sys.version_info < (3, 8, 0): + raise OSError(f'Tarski requires Python>=3.8, but yours is {sys.version_info}') + __all__ = ['__version__', '__version_info__', 'FirstOrderLanguage', 'language', 'Theories', 'Function', 'Predicate', 'Formula', 'Term', 'Constant', 'Variable', 'LanguageError', 'fstrips'] From bf4febcf26b3b48d6158987ee18f49ddc4258e40 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 11:14:34 +0100 Subject: [PATCH 03/14] Upgrade main code to py3.6 Using: find src -name '*.py' -exec pyupgrade --py36-plus {} \; --- src/tarski/__init__.py | 1 - src/tarski/analysis/csp.py | 2 +- src/tarski/benchmarks/blocksworld.py | 4 +- src/tarski/dl/__init__.py | 1 - src/tarski/dl/concepts.py | 46 +++++++-------- src/tarski/dl/errors.py | 1 - src/tarski/dl/factory.py | 27 +++++---- src/tarski/dl/features.py | 8 +-- src/tarski/errors.py | 9 ++- src/tarski/evaluators/simple.py | 2 +- src/tarski/fol.py | 5 +- src/tarski/fstrips/__init__.py | 1 - src/tarski/fstrips/action.py | 6 +- src/tarski/fstrips/contingent/errors.py | 1 - src/tarski/fstrips/contingent/problem.py | 3 +- src/tarski/fstrips/contingent/sensor.py | 8 +-- src/tarski/fstrips/derived.py | 4 +- src/tarski/fstrips/errors.py | 9 ++- src/tarski/fstrips/fstrips.py | 13 ++--- .../hybrid/differential_constraints.py | 11 ++-- src/tarski/fstrips/hybrid/errors.py | 1 - src/tarski/fstrips/hybrid/problem.py | 5 +- src/tarski/fstrips/hybrid/reaction.py | 9 ++- src/tarski/fstrips/manipulation/__init__.py | 1 - src/tarski/fstrips/manipulation/types.py | 1 - src/tarski/fstrips/ops.py | 2 +- src/tarski/fstrips/problem.py | 1 - src/tarski/fstrips/representation.py | 2 +- src/tarski/funcsym/__init__.py | 1 - src/tarski/grounding/__init__.py | 1 - src/tarski/grounding/errors.py | 3 +- src/tarski/grounding/lp_grounding.py | 2 +- src/tarski/grounding/naive/constraints.py | 3 +- .../grounding/naive/diff_constraints.py | 3 +- src/tarski/grounding/naive/instantiation.py | 1 - src/tarski/grounding/naive/reactions.py | 3 +- src/tarski/grounding/naive/sensors.py | 3 +- src/tarski/grounding/naive_grounding.py | 2 +- src/tarski/grounding/ops.py | 1 - src/tarski/io/__init__.py | 1 - src/tarski/io/_fstrips/parser/lexer.py | 2 +- src/tarski/io/_fstrips/parser/parser.py | 15 +++-- src/tarski/io/_fstrips/reader.py | 6 +- src/tarski/io/common.py | 3 +- src/tarski/io/fstrips.py | 44 +++++++-------- src/tarski/io/pddl/errors.py | 6 +- src/tarski/io/pddl/instance.py | 2 +- src/tarski/io/pddl/lexer.py | 2 +- src/tarski/io/pddl/parser.py | 46 +++++++-------- src/tarski/io/rddl.py | 56 +++++++++---------- src/tarski/ndl/temporal.py | 26 ++++----- src/tarski/rddl/task.py | 1 - src/tarski/reachability/__init__.py | 1 - src/tarski/reachability/asp.py | 18 +++--- src/tarski/reachability/clingo_wrapper.py | 6 +- src/tarski/sas/action.py | 2 +- src/tarski/search/__init__.py | 1 - src/tarski/search/model.py | 1 - 
src/tarski/syntax/__init__.py | 1 - src/tarski/syntax/algebra/matrix.py | 2 +- src/tarski/syntax/arithmetic/__init__.py | 2 +- src/tarski/syntax/arithmetic/random.py | 1 - src/tarski/syntax/formulas.py | 18 +++--- src/tarski/syntax/function.py | 1 - src/tarski/syntax/predicate.py | 1 - src/tarski/syntax/sorts.py | 7 +-- src/tarski/syntax/symrefs.py | 3 +- src/tarski/syntax/temporal/ltl.py | 13 ++--- src/tarski/syntax/terms.py | 4 +- src/tarski/syntax/transform/__init__.py | 1 - src/tarski/syntax/transform/prenex.py | 4 +- .../syntax/transform/simplifications.py | 1 - src/tarski/syntax/transform/substitutions.py | 1 - src/tarski/syntax/util.py | 1 - src/tarski/syntax/visitors.py | 1 - src/tarski/util.py | 2 +- src/tarski/utils/__init__.py | 1 - src/tarski/utils/algorithms.py | 2 +- src/tarski/utils/command.py | 4 +- src/tarski/utils/hashing.py | 1 - src/tarski/utils/resources.py | 2 +- src/tarski/utils/serialization.py | 2 - src/tarski/version.py | 1 - 83 files changed, 236 insertions(+), 287 deletions(-) diff --git a/src/tarski/__init__.py b/src/tarski/__init__.py index 59ef90fe..3cb04989 100644 --- a/src/tarski/__init__.py +++ b/src/tarski/__init__.py @@ -1,4 +1,3 @@ - import logging import sys as sys diff --git a/src/tarski/analysis/csp.py b/src/tarski/analysis/csp.py index 7b9af04a..1cfa994a 100644 --- a/src/tarski/analysis/csp.py +++ b/src/tarski/analysis/csp.py @@ -77,7 +77,7 @@ def check_hypergraph_acyclicity(hypergraph): Abiteboul, S., Hull, R. and Vianu, V (1995). Foundations of Databases, pp.131-132. """ nodes = set(itertools.chain.from_iterable(hypergraph)) - edges = set(frozenset(x) for x in hypergraph) # simply convert the tuple into frozensets + edges = {frozenset(x) for x in hypergraph} # simply convert the tuple into frozensets if len(edges) <= 1 or len(nodes) <= 1: return True diff --git a/src/tarski/benchmarks/blocksworld.py b/src/tarski/benchmarks/blocksworld.py index a7e1ad93..49741b4e 100644 --- a/src/tarski/benchmarks/blocksworld.py +++ b/src/tarski/benchmarks/blocksworld.py @@ -24,7 +24,7 @@ def generate_strips_bw_language(nblocks=4): lang.predicate('on', object_t, object_t) _ = [lang.predicate(p, object_t) for p in "ontable clear holding".split()] - _ = [lang.constant('b{}'.format(k), object_t) for k in range(1, nblocks + 1)] + _ = [lang.constant(f'b{k}', object_t) for k in range(1, nblocks + 1)] return lang @@ -41,7 +41,7 @@ def generate_fstrips_bw_language(nblocks=4): # Table and blocks lang.constant('table', place) - _ = [lang.constant('b{}'.format(k), block) for k in range(1, nblocks + 1)] + _ = [lang.constant(f'b{k}', block) for k in range(1, nblocks + 1)] return lang diff --git a/src/tarski/dl/__init__.py b/src/tarski/dl/__init__.py index 7fee141c..27d5e0af 100644 --- a/src/tarski/dl/__init__.py +++ b/src/tarski/dl/__init__.py @@ -1,4 +1,3 @@ - from .concepts import Concept, PrimitiveConcept, UniversalConcept, NotConcept, ExistsConcept, ForallConcept, \ EqualConcept, AndConcept, OrConcept, EmptyConcept, NominalConcept, NullaryAtom, GoalConcept, GoalNullaryAtom from .concepts import Role, PrimitiveRole, InverseRole, StarRole, RestrictRole, CompositionRole, GoalRole diff --git a/src/tarski/dl/concepts.py b/src/tarski/dl/concepts.py index 7ad4f1da..9f46d178 100644 --- a/src/tarski/dl/concepts.py +++ b/src/tarski/dl/concepts.py @@ -25,7 +25,7 @@ def __eq__(self, other): and self.name == other.name) def __repr__(self): - return "{}".format(self.name) + return f"{self.name}" __str__ = __repr__ @@ -35,7 +35,7 @@ def denotation(self, model): class 
GoalNullaryAtom(NullaryAtom): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -133,7 +133,7 @@ def denotation(self, model): return model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return "Nominal({})".format(self.name) + return f"Nominal({self.name})" __str__ = __repr__ @@ -166,7 +166,7 @@ def denotation(self, model): return model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return "{}".format(self.name) + return f"{self.name}" __str__ = __repr__ @@ -176,7 +176,7 @@ def flatten(self): class GoalConcept(PrimitiveConcept): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -199,7 +199,7 @@ def denotation(self, model): return ~model.compressed_denotation(self.c) def __repr__(self): - return 'Not({})'.format(self.c) + return f'Not({self.c})' __str__ = __repr__ @@ -230,7 +230,7 @@ def denotation(self, model): return ext_c1 & ext_c2 def __repr__(self): - return 'And({},{})'.format(self.c1, self.c2) + return f'And({self.c1},{self.c2})' __str__ = __repr__ @@ -261,7 +261,7 @@ def denotation(self, model): return ext_c1 | ext_c2 def __repr__(self): - return 'Or({},{})'.format(self.c1, self.c2) + return f'Or({self.c1},{self.c2})' __str__ = __repr__ @@ -291,11 +291,11 @@ def denotation(self, model): ext_c = model.uncompressed_denotation(self.c) ext_r = model.uncompressed_denotation(self.r) # result = [x for x in objects if [z for (y, z) in ext_r if y == x and z in ext_c]] - result = set(x for x, y in ext_r if y in ext_c) + result = {x for x, y in ext_r if y in ext_c} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Exists({},{})'.format(self.r, self.c) + return f'Exists({self.r},{self.c})' __str__ = __repr__ @@ -332,7 +332,7 @@ def denotation(self, model): return model.compressed(result, self.ARITY) def __repr__(self): - return 'Forall({},{})'.format(self.r, self.c) + return f'Forall({self.r},{self.c})' __str__ = __repr__ @@ -363,14 +363,14 @@ def denotation(self, model): ext_r2 = model.uncompressed_denotation(self.r2) result = set() for x in universe: - left = set(z for (y, z) in ext_r1 if y == x) - right = set(z for (y, z) in ext_r2 if y == x) + left = {z for (y, z) in ext_r1 if y == x} + right = {z for (y, z) in ext_r2 if y == x} if left == right: result.add(x) return model.compressed(result, self.ARITY) def __repr__(self): - return 'Equal({},{})'.format(self.r1, self.r2) + return f'Equal({self.r1},{self.r2})' __str__ = __repr__ @@ -402,7 +402,7 @@ def denotation(self, model): return model.compressed(model.primitive_denotation(self), self.ARITY) def __repr__(self): - return '{}'.format(self.name) + return f'{self.name}' __str__ = __repr__ @@ -412,7 +412,7 @@ def flatten(self): class GoalRole(PrimitiveRole): def __repr__(self): - return "{}_g".format(self.name) + return f"{self.name}_g" __str__ = __repr__ @@ -434,11 +434,11 @@ def __eq__(self, other): def denotation(self, model): ext_r = model.uncompressed_denotation(self.r) - result = set((y, x) for (x, y) in ext_r) + result = {(y, x) for (x, y) in ext_r} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Inverse({})'.format(self.r) + return f'Inverse({self.r})' __str__ = __repr__ @@ -466,7 +466,7 @@ def denotation(self, model): return model.compressed(result, self.ARITY) def __repr__(self): - return 'Star({})'.format(self.r) + return f'Star({self.r})' __str__ = __repr__ @@ -505,7 +505,7 @@ def denotation(self, model): return 
model.compressed(result, self.ARITY) def __repr__(self): - return 'Composition({},{})'.format(self.r1, self.r2) + return f'Composition({self.r1},{self.r2})' __str__ = __repr__ @@ -533,11 +533,11 @@ def __eq__(self, other): def denotation(self, model): ext_c = model.uncompressed_denotation(self.c) ext_r = model.uncompressed_denotation(self.r) - result = set((x, y) for (x, y) in ext_r if y in ext_c) + result = {(x, y) for (x, y) in ext_r if y in ext_c} return model.compressed(result, self.ARITY) def __repr__(self): - return 'Restrict({},{})'.format(self.r, self.c) + return f'Restrict({self.r},{self.c})' __str__ = __repr__ @@ -547,4 +547,4 @@ def flatten(self): def _check_arity(term, expected_arity, predfun): if expected_arity != predfun.uniform_arity(): - raise ArityDLMismatch('Cannot create {} from predicate "{}"'.format(term, predfun)) + raise ArityDLMismatch(f'Cannot create {term} from predicate "{predfun}"') diff --git a/src/tarski/dl/errors.py b/src/tarski/dl/errors.py index 7e6ed8e9..2c3c93bd 100644 --- a/src/tarski/dl/errors.py +++ b/src/tarski/dl/errors.py @@ -1,4 +1,3 @@ - from ..errors import TarskiError diff --git a/src/tarski/dl/factory.py b/src/tarski/dl/factory.py index becbcae9..b0295483 100644 --- a/src/tarski/dl/factory.py +++ b/src/tarski/dl/factory.py @@ -1,4 +1,3 @@ - import logging from .. import FirstOrderLanguage @@ -57,7 +56,7 @@ def generate_primitives_from_language(self, nominals, types, goal_predicates): roles.append(GoalRole(predfun)) else: - logging.warning('Predicate/Function "{}" with normalized arity > 2 ignored'.format(predfun)) + logging.warning(f'Predicate/Function "{predfun}" with normalized arity > 2 ignored') for c in nominals: concepts.append(NominalConcept(c.symbol, c.sort)) @@ -78,12 +77,12 @@ def create_exists_concept(self, role: Role, concept: Concept): _, s2 = role.sort if concept == self.bot: - logging.debug('Concept "{}" is statically empty'.format(result)) + logging.debug(f'Concept "{result}" is statically empty') return None # TODO ADD: If C is a sort-concept of the same sort than s2, then the concept will be equiv to exist(R.True) if not self.language.are_vertically_related(s2, concept.sort): - logging.debug('Concept "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Concept "{result}" pruned for type-inconsistency reasons') return None if isinstance(role, RestrictRole) and concept == self.top: @@ -109,7 +108,7 @@ def create_forall_concept(self, role: Role, concept: Concept): return None if not self.language.are_vertically_related(s2, concept.sort): - logging.debug('Concept "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Concept "{result}" pruned for type-inconsistency reasons') return None return result @@ -123,12 +122,12 @@ def create_and_concept(self, c1: Concept, c2: Concept): return None # No sense in C and C if c1 in (self.top, self.bot) or c2 in (self.top, self.bot): - logging.debug('AND of {} and {} pruned, no sense in AND\'ing with top or bot'.format(c1, c2)) + logging.debug(f'AND of {c1} and {c2} pruned, no sense in AND\'ing with top or bot') return None if sort is None: # i.e. 
c1 and c2 are disjoint types - logging.debug('AND of {} and {} pruned for type-inconsistency reasons'.format(c1, c2)) + logging.debug(f'AND of {c1} and {c2} pruned for type-inconsistency reasons') return None return AndConcept(c1, c2, sort) @@ -140,7 +139,7 @@ def create_or_concept(self, c1: Concept, c2: Concept): return None # No sense in C OR C if c1 in (self.top, self.bot) or c2 in (self.top, self.bot): - logging.debug('OR of {} and {} pruned, no sense in OR\'ing with top or bot'.format(c1, c2)) + logging.debug(f'OR of {c1} and {c2} pruned, no sense in OR\'ing with top or bot') return None return OrConcept(c1, c2, sort) @@ -151,7 +150,7 @@ def create_equal_concept(self, r1: Role, r2: Role): sort = self.language.most_restricted_type(r1.sort[0], r2.sort[0]) if sort is None: - logging.debug('Concept "EqualConcept({},{})" pruned for type-inconsistency reasons'.format(r1, r2)) + logging.debug(f'Concept "EqualConcept({r1},{r2})" pruned for type-inconsistency reasons') return None return EqualConcept(r1, r2, sort) @@ -159,15 +158,15 @@ def create_restrict_role(self, r: Role, c: Concept): result = RestrictRole(r, c) if not self.language.are_vertically_related(r.sort[1], c.sort): - logging.debug('Role "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Role "{result}" pruned for type-inconsistency reasons') return None if isinstance(c, UniversalConcept) or c == self.bot: - logging.debug('Role "{}" pruned; no sense in restricting to top / bot concepts'.format(result)) + logging.debug(f'Role "{result}" pruned; no sense in restricting to top / bot concepts') return None if isinstance(r, RestrictRole): - logging.debug('Role "{}" pruned; no direct nesting of restrictions'.format(result)) + logging.debug(f'Role "{result}" pruned; no direct nesting of restrictions') return None return result @@ -182,12 +181,12 @@ def create_composition_role(self, r1: Role, r2: Role): result = CompositionRole(r1, r2) if not self.language.are_vertically_related(r1.sort[1], r2.sort[0]): - logging.debug('Role "{}" pruned for type-inconsistency reasons'.format(result)) + logging.debug(f'Role "{result}" pruned for type-inconsistency reasons') return None num_comp = len(filter_subnodes(result, CompositionRole)) if num_comp > 2: - logging.debug('Role "{}" pruned: number of compositions ({}) exceeds threshold'.format(result, num_comp)) + logging.debug(f'Role "{result}" pruned: number of compositions ({num_comp}) exceeds threshold') return None return result diff --git a/src/tarski/dl/features.py b/src/tarski/dl/features.py index 6b620c42..4ffa5921 100644 --- a/src/tarski/dl/features.py +++ b/src/tarski/dl/features.py @@ -92,7 +92,7 @@ def diff(self, x, y): return compute_int_feature_diff(x, y) def __repr__(self): - return 'Num[{}]'.format(self.c) + return f'Num[{self.c}]' __str__ = __repr__ @@ -126,7 +126,7 @@ def diff(self, x, y): return compute_bool_feature_diff(x, y) def __repr__(self): - return 'Bool[{}]'.format(self.c) + return f'Bool[{self.c}]' __str__ = __repr__ @@ -214,7 +214,7 @@ def diff(self, x, y): return compute_int_feature_diff(x, y) def __repr__(self): - return 'Dist[{};{};{}]'.format(self.c1, self.r, self.c2) + return f'Dist[{self.c1};{self.r};{self.c2}]' __str__ = __repr__ @@ -269,7 +269,7 @@ def diff(self, x, y): return compute_bool_feature_diff(x, y) def __repr__(self): - return 'Atom[{}]'.format(self.atom) + return f'Atom[{self.atom}]' __str__ = __repr__ diff --git a/src/tarski/errors.py b/src/tarski/errors.py index 4e5c9746..703a4d2b 100644 --- a/src/tarski/errors.py +++ 
b/src/tarski/errors.py @@ -1,4 +1,3 @@ - class TarskiError(Exception): """ Common ancestor class to all of Tarski's exceptions """ @@ -89,7 +88,7 @@ class DuplicateActionDefinition(DuplicateDefinition): class DuplicateVariableDefinition(DuplicateDefinition): def __init__(self, variable, other, msg=None): - msg = msg or "Variable with name '{}' already defined in binding: {}".format(variable.symbol, other) + msg = msg or f"Variable with name '{variable.symbol}' already defined in binding: {other}" super().__init__(variable, other, msg) @@ -123,7 +122,7 @@ class UndefinedVariable(UndefinedElement): class UnboundVariable(SemanticError): def __init__(self, var, msg=None): - msg = msg or 'Attempted to evaluate open formula with free variable {}'.format(var) + msg = msg or f'Attempted to evaluate open formula with free variable {var}' super().__init__(msg) @@ -136,12 +135,12 @@ def __init__(self, element, point, value, msg=None): class UnknownTheory(LanguageError): def __init__(self, theory): - super().__init__('Unknown first-order theory "{}"'.format(theory)) + super().__init__(f'Unknown first-order theory "{theory}"') class CommandNotFoundError(TarskiError): def __init__(self, name, msg=None): - msg = msg or 'Necessary command "{}" could not be found'.format(name) + msg = msg or f'Necessary command "{name}" could not be found' super().__init__(msg) diff --git a/src/tarski/evaluators/simple.py b/src/tarski/evaluators/simple.py index 832be930..2901b37d 100644 --- a/src/tarski/evaluators/simple.py +++ b/src/tarski/evaluators/simple.py @@ -125,7 +125,7 @@ def symbolic_matrix_multiplication(lhs: Matrix, rhs: Matrix): C, D = rhs.shape if B != C: - raise TypeError('matrices {}x{} and {}x{} cannot be multiplied together'.format(A, B, C, D)) + raise TypeError(f'matrices {A}x{B} and {C}x{D} cannot be multiplied together') zip_b = list(zip(*rhs.matrix)) return [[sum(ele_a * ele_b for ele_a, ele_b in zip(row_a, col_b)) diff --git a/src/tarski/fol.py b/src/tarski/fol.py index 38c3f6cd..a1571508 100644 --- a/src/tarski/fol.py +++ b/src/tarski/fol.py @@ -1,4 +1,3 @@ - import copy import itertools from collections import defaultdict, OrderedDict @@ -224,7 +223,7 @@ def _retrieve_object(self, obj, type_): # obj must be a string, which we take as the name of a language element if type_ not in self._element_containers: - raise RuntimeError("Trying to index incorrect type {}".format(type_)) + raise RuntimeError(f"Trying to index incorrect type {type_}") if obj not in self._element_containers[type_]: raise err.UndefinedElement(obj) @@ -339,7 +338,7 @@ def dump(self): def check_well_formed(self): for _, s in self._sorts.items(): if s.cardinality() == 0: - raise err.LanguageError("Sort '{}' is empty!".format(s)) + raise err.LanguageError(f"Sort '{s}' is empty!") def most_restricted_type(self, t1, t2): if self.is_subtype(t1, t2): diff --git a/src/tarski/fstrips/__init__.py b/src/tarski/fstrips/__init__.py index 79829711..f2e92368 100644 --- a/src/tarski/fstrips/__init__.py +++ b/src/tarski/fstrips/__init__.py @@ -1,4 +1,3 @@ - from .problem import Problem, create_fstrips_problem from .action import Action from .derived import Derived diff --git a/src/tarski/fstrips/action.py b/src/tarski/fstrips/action.py index a91305cc..7e5f3ac5 100644 --- a/src/tarski/fstrips/action.py +++ b/src/tarski/fstrips/action.py @@ -19,7 +19,7 @@ def __lt__(self, other): return self.name < other.name def ident(self): - paramlist = "{}".format(','.join("{}: {}".format(p.symbol, p.sort.name) for p in self.parameters)) + paramlist = 
"{}".format(','.join(f"{p.symbol}: {p.sort.name}" for p in self.parameters)) return f'{self.name}({paramlist})' def __str__(self): @@ -27,8 +27,8 @@ def __str__(self): __repr__ = __str__ def print(self): - tokens = ['{}:'.format(self.ident()), - 'pre=({})'.format(self.precondition), + tokens = [f'{self.ident()}:', + f'pre=({self.precondition})', 'eff=({})'.format(' & '.join(str(eff) for eff in self.effects))] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/contingent/errors.py b/src/tarski/fstrips/contingent/errors.py index 9b6c1b5d..b40f939c 100644 --- a/src/tarski/fstrips/contingent/errors.py +++ b/src/tarski/fstrips/contingent/errors.py @@ -1,4 +1,3 @@ - from ...errors import TarskiError, DuplicateDefinition, UndefinedElement diff --git a/src/tarski/fstrips/contingent/problem.py b/src/tarski/fstrips/contingent/problem.py index 7a1cf5f0..6f8d9d9d 100644 --- a/src/tarski/fstrips/contingent/problem.py +++ b/src/tarski/fstrips/contingent/problem.py @@ -1,4 +1,3 @@ - from collections import OrderedDict from ..problem import Problem from .sensor import Sensor @@ -33,6 +32,6 @@ def get_symbols(self, pv, ev, cv): pv.visit(sensor.obs) def __str__(self): - return 'FSTRIPS Contingent Problem "{}", domain "{}"'.format(self.name, self.domain_name) + return f'FSTRIPS Contingent Problem "{self.name}", domain "{self.domain_name}"' __repr__ = __str__ diff --git a/src/tarski/fstrips/contingent/sensor.py b/src/tarski/fstrips/contingent/sensor.py index 124d6d66..27c61a04 100644 --- a/src/tarski/fstrips/contingent/sensor.py +++ b/src/tarski/fstrips/contingent/sensor.py @@ -25,7 +25,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -34,7 +34,7 @@ def dump(self): obs=[eff.dump() for eff in self.effects.dump()]) def __str__(self): - tokens = ['action {}:'.format(self.name), - 'C=({})'.format(self.condition), - 'L=({})'.format(str(self.obs))] + tokens = [f'action {self.name}:', + f'C=({self.condition})', + f'L=({str(self.obs)})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/derived.py b/src/tarski/fstrips/derived.py index 5b1f11c4..017882d3 100644 --- a/src/tarski/fstrips/derived.py +++ b/src/tarski/fstrips/derived.py @@ -27,10 +27,10 @@ def dump(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.predicate.symbol, params) + return f'{self.predicate.symbol}({params})' def __str__(self): tokens = ['derived {} {}:'.format(self.predicate.symbol, ' '.join(map(str, self.parameters))), - 'formula=({})'.format(self.formula)] + f'formula=({self.formula})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/errors.py b/src/tarski/fstrips/errors.py index 16d31024..9c21bbee 100644 --- a/src/tarski/fstrips/errors.py +++ b/src/tarski/fstrips/errors.py @@ -1,17 +1,16 @@ - from ..errors import TarskiError, DuplicateDefinition, UndefinedElement class IncompleteProblemError(TarskiError): def __init__(self, problem, msg=None): msg = msg or 'specification is incomplete!' - super().__init__('Problem "{}": {}'.format(problem.name, msg)) + super().__init__(f'Problem "{problem.name}": {msg}') class InvalidEffectError(TarskiError): def __init__(self, effect, msg=None): - msg = msg or 'definition of effect "{}" is invalid!'.format(effect.tostring()) - super().__init__('{}'.format(msg)) + msg = msg or f'definition of effect "{effect.tostring()}" is invalid!' 
+ super().__init__(f'{msg}') class DuplicateActionDefinition(DuplicateDefinition): @@ -31,5 +30,5 @@ def __init__(self, symbol, formula, msg=None): if msg is None: msg = ' ' - msg = 'definition of derived predicate "{} \\equiv {}" is invalid! {}'.format(symbol, formula, msg) + msg = f'definition of derived predicate "{symbol} \\equiv {formula}" is invalid! {msg}' super().__init__(msg) diff --git a/src/tarski/fstrips/fstrips.py b/src/tarski/fstrips/fstrips.py index 6b5acec4..9cde04d5 100644 --- a/src/tarski/fstrips/fstrips.py +++ b/src/tarski/fstrips/fstrips.py @@ -1,4 +1,3 @@ - from enum import Enum from typing import Union, List, Optional, Callable, Any @@ -18,7 +17,7 @@ def __str__(self): class SingleEffect(BaseEffect): def __str__(self): - return "({} -> {})".format(self.condition, self.tostring()) + return f"({self.condition} -> {self.tostring()})" __repr__ = __str__ @@ -33,7 +32,7 @@ def __init__(self, atom, condition=top): self.atom = atom def tostring(self): - return "ADD({})".format(self.atom) + return f"ADD({self.atom})" class DelEffect(SingleEffect): @@ -43,7 +42,7 @@ def __init__(self, atom, condition=top): self.atom = atom def tostring(self): - return "DEL({})".format(self.atom) + return f"DEL({self.atom})" class LiteralEffect(SingleEffect): @@ -52,7 +51,7 @@ def __init__(self, lit, condition=top): self.lit = lit def tostring(self): - return "LIT({})".format(self.lit) + return f"LIT({self.lit})" class FunctionalEffect(SingleEffect): @@ -156,7 +155,7 @@ def __init__(self, lhs, rhs, condition=top): self.check_well_formed() def tostring(self): - return "VectorisedEffect({} := {})".format(self.lhs, self.rhs) + return f"VectorisedEffect({self.lhs} := {self.rhs})" def check_well_formed(self): if not hasattr(self.lhs, 'shape'): @@ -195,7 +194,7 @@ def __init__(self, y, a, x, b, condition=top): self.check_well_formed() def tostring(self): - return "LinearEffect({} := {} * {} + {})".format(self.y, self.A, self.x, self.b) + return f"LinearEffect({self.y} := {self.A} * {self.x} + {self.b})" def check_well_formed(self): if not hasattr(self.y, 'shape'): diff --git a/src/tarski/fstrips/hybrid/differential_constraints.py b/src/tarski/fstrips/hybrid/differential_constraints.py index 5070e419..60bfadd4 100644 --- a/src/tarski/fstrips/hybrid/differential_constraints.py +++ b/src/tarski/fstrips/hybrid/differential_constraints.py @@ -1,4 +1,3 @@ - from ...syntax import BuiltinFunctionSymbol, CompoundTerm from . import errors as err @@ -24,7 +23,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -34,8 +33,8 @@ def dump(self): ode=self.ode.dump()) def __str__(self): - tokens = ['reaction {}:'.format(self.name), - 'cond: ({})'.format(self.condition), - 'variate: {}'.format(self.variate), - 'ode: {}'.format(self.ode)] + tokens = [f'reaction {self.name}:', + f'cond: ({self.condition})', + f'variate: {self.variate}', + f'ode: {self.ode}'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/hybrid/errors.py b/src/tarski/fstrips/hybrid/errors.py index 7376a4e1..77834f0e 100644 --- a/src/tarski/fstrips/hybrid/errors.py +++ b/src/tarski/fstrips/hybrid/errors.py @@ -1,4 +1,3 @@ - from ... 
errors import DuplicateDefinition, UndefinedElement, SyntacticError diff --git a/src/tarski/fstrips/hybrid/problem.py b/src/tarski/fstrips/hybrid/problem.py index 881ac3d7..ebfcf458 100644 --- a/src/tarski/fstrips/hybrid/problem.py +++ b/src/tarski/fstrips/hybrid/problem.py @@ -1,4 +1,3 @@ - from collections import OrderedDict from ..problem import Problem @@ -58,12 +57,12 @@ def get_symbols(self, pv, ev, cv): for yk in eff.lhs[:, 0]: ev.visit(yk) else: - raise RuntimeError("Effect type '{}' cannot be analysed".format(type(eff))) + raise RuntimeError(f"Effect type '{type(eff)}' cannot be analysed") for _, dc in self.differential_constraints.items(): pv.visit(dc.condition) pv.visit(dc.variate) def __str__(self): - return 'FSTRIPS Hybrid Problem "{}", domain "{}"'.format(self.name, self.domain_name) + return f'FSTRIPS Hybrid Problem "{self.name}", domain "{self.domain_name}"' __repr__ = __str__ diff --git a/src/tarski/fstrips/hybrid/reaction.py b/src/tarski/fstrips/hybrid/reaction.py index 8dcc3121..7cbafa95 100644 --- a/src/tarski/fstrips/hybrid/reaction.py +++ b/src/tarski/fstrips/hybrid/reaction.py @@ -1,4 +1,3 @@ - class Reaction: """ A (possibly lifted) reaction """ @@ -15,7 +14,7 @@ def _check_well_formed(self): def ident(self): params = ', '.join([str(o) for o in self.parameters]) - return '{}({})'.format(self.name, params) + return f'{self.name}({params})' def dump(self): return dict(name=self.name, @@ -24,7 +23,7 @@ def dump(self): effect=[eff.dump() for eff in self.effect.dump()]) def __str__(self): - tokens = ['reaction {}:'.format(self.name), - 'cond: ({})'.format(self.condition), - 'eff: ({})'.format(self.effect)] + tokens = [f'reaction {self.name}:', + f'cond: ({self.condition})', + f'eff: ({self.effect})'] return '\n'.join(tokens) diff --git a/src/tarski/fstrips/manipulation/__init__.py b/src/tarski/fstrips/manipulation/__init__.py index 4c192253..068cd50e 100644 --- a/src/tarski/fstrips/manipulation/__init__.py +++ b/src/tarski/fstrips/manipulation/__init__.py @@ -1,2 +1 @@ - from .simplify import Simplify diff --git a/src/tarski/fstrips/manipulation/types.py b/src/tarski/fstrips/manipulation/types.py index f645778f..839c031c 100644 --- a/src/tarski/fstrips/manipulation/types.py +++ b/src/tarski/fstrips/manipulation/types.py @@ -1,4 +1,3 @@ - # import copy # TODO - Work in Progress diff --git a/src/tarski/fstrips/ops.py b/src/tarski/fstrips/ops.py index 89588364..9967c9b3 100644 --- a/src/tarski/fstrips/ops.py +++ b/src/tarski/fstrips/ops.py @@ -26,7 +26,7 @@ def collect_all_symbols(problem: Problem, include_builtin=False) -> Set[Union[Pr walker.run(problem) if include_builtin: return walker.symbols - return set(s for s in walker.symbols if not s.builtin) + return {s for s in walker.symbols if not s.builtin} def collect_affected_symbols(problem: Problem) -> Set[Union[Predicate, Function]]: diff --git a/src/tarski/fstrips/problem.py b/src/tarski/fstrips/problem.py index 321b81c9..ba8759f6 100644 --- a/src/tarski/fstrips/problem.py +++ b/src/tarski/fstrips/problem.py @@ -1,4 +1,3 @@ - from collections import OrderedDict from .. 
import model diff --git a/src/tarski/fstrips/representation.py b/src/tarski/fstrips/representation.py index 0f9eead5..aa928226 100644 --- a/src/tarski/fstrips/representation.py +++ b/src/tarski/fstrips/representation.py @@ -368,7 +368,7 @@ def identify_cost_related_functions(problem: Problem) -> Set[str]: for effect in action.effects: mark_cost_unrelated_functions_in_effect(effect, related_to_non_cost_effects) - return set(f.name for f in functions if f.name not in related_to_non_cost_effects) + return {f.name for f in functions if f.name not in related_to_non_cost_effects} def mark_cost_unrelated_functions_in_effect(effect, functions): diff --git a/src/tarski/funcsym/__init__.py b/src/tarski/funcsym/__init__.py index 289d4bec..e5d16b4f 100644 --- a/src/tarski/funcsym/__init__.py +++ b/src/tarski/funcsym/__init__.py @@ -1,4 +1,3 @@ - from .. import modules diff --git a/src/tarski/grounding/__init__.py b/src/tarski/grounding/__init__.py index c0726ccb..c8cc835c 100644 --- a/src/tarski/grounding/__init__.py +++ b/src/tarski/grounding/__init__.py @@ -1,4 +1,3 @@ - from .naive_grounding import ProblemGrounding, create_all_possible_state_variables, \ NaiveGroundingStrategy from .lp_grounding import LPGroundingStrategy diff --git a/src/tarski/grounding/errors.py b/src/tarski/grounding/errors.py index af44e3ce..d9daf834 100644 --- a/src/tarski/grounding/errors.py +++ b/src/tarski/grounding/errors.py @@ -1,11 +1,10 @@ - from ..errors import TarskiError class UnableToGroundError(TarskiError): def __init__(self, sym, msg=None): msg = msg or 'Reason unspecified' - super().__init__('Unable to ground Term/Atom "{}": {}'.format(sym, msg)) + super().__init__(f'Unable to ground Term/Atom "{sym}": {msg}') class ReachabilityLPUnsolvable(TarskiError): diff --git a/src/tarski/grounding/lp_grounding.py b/src/tarski/grounding/lp_grounding.py index fb602afa..0deae072 100644 --- a/src/tarski/grounding/lp_grounding.py +++ b/src/tarski/grounding/lp_grounding.py @@ -77,7 +77,7 @@ def _solve_lp(self): return self.model def __str__(self): - return 'LPGroundingStrategy["{}"]'.format(self.problem.name) + return f'LPGroundingStrategy["{self.problem.name}"]' __repr__ = __str__ diff --git a/src/tarski/grounding/naive/constraints.py b/src/tarski/grounding/naive/constraints.py index b3c6079b..c7fe7052 100644 --- a/src/tarski/grounding/naive/constraints.py +++ b/src/tarski/grounding/naive/constraints.py @@ -1,4 +1,3 @@ - import itertools from ...syntax import QuantifiedFormula, Quantifier, create_substitution, substitute_expression @@ -20,7 +19,7 @@ def __init__(self, prob, index): self.constraints_generated = 0 def __str__(self): - return 'Constraints Generated: {}'.format(self.constraints_generated) + return f'Constraints Generated: {self.constraints_generated}' def calculate_constraints(self): diff --git a/src/tarski/grounding/naive/diff_constraints.py b/src/tarski/grounding/naive/diff_constraints.py index a75fea92..8a2d4b93 100644 --- a/src/tarski/grounding/naive/diff_constraints.py +++ b/src/tarski/grounding/naive/diff_constraints.py @@ -1,4 +1,3 @@ - import itertools from ...fstrips import hybrid @@ -18,7 +17,7 @@ def __init__(self, prob, index): self.differential_constraints_generated = 0 def __str__(self): - return 'Reactions generated: {}'.format(self.differential_constraints_generated) + return f'Reactions generated: {self.differential_constraints_generated}' def calculate_constraints(self): diff --git a/src/tarski/grounding/naive/instantiation.py b/src/tarski/grounding/naive/instantiation.py index 
5662f902..68969f75 100644 --- a/src/tarski/grounding/naive/instantiation.py +++ b/src/tarski/grounding/naive/instantiation.py @@ -1,4 +1,3 @@ - from ...syntax.terms import Constant, Variable from ..errors import UnableToGroundError diff --git a/src/tarski/grounding/naive/reactions.py b/src/tarski/grounding/naive/reactions.py index a2b9a7b5..6566d928 100644 --- a/src/tarski/grounding/naive/reactions.py +++ b/src/tarski/grounding/naive/reactions.py @@ -1,4 +1,3 @@ - import itertools from ...fstrips import hybrid @@ -19,7 +18,7 @@ def __init__(self, prob, index): self.reactions_generated = 0 def __str__(self): - return 'Reactions generated: {}'.format(self.reactions_generated) + return f'Reactions generated: {self.reactions_generated}' def calculate_reactions(self): diff --git a/src/tarski/grounding/naive/sensors.py b/src/tarski/grounding/naive/sensors.py index 27f9a28b..f9d33204 100644 --- a/src/tarski/grounding/naive/sensors.py +++ b/src/tarski/grounding/naive/sensors.py @@ -1,4 +1,3 @@ - import itertools from ...fstrips.contingent import Sensor @@ -18,7 +17,7 @@ def __init__(self, prob, index): self.sensors_generated = 0 def __str__(self): - return 'Sensors generated: {}'.format(self.sensors_generated) + return f'Sensors generated: {self.sensors_generated}' def calculate_sensors(self): # @TODO: this is pretty much the same code as that of grounding actions diff --git a/src/tarski/grounding/naive_grounding.py b/src/tarski/grounding/naive_grounding.py index 07e12e45..388d6fa4 100644 --- a/src/tarski/grounding/naive_grounding.py +++ b/src/tarski/grounding/naive_grounding.py @@ -138,7 +138,7 @@ def ground_actions(self): return groundings def __str__(self): - return 'NaiveGroundingStrategy["{}"]'.format(self.problem.name) + return f'NaiveGroundingStrategy["{self.problem.name}"]' __repr__ = __str__ diff --git a/src/tarski/grounding/ops.py b/src/tarski/grounding/ops.py index add93bbd..300a080f 100644 --- a/src/tarski/grounding/ops.py +++ b/src/tarski/grounding/ops.py @@ -1,4 +1,3 @@ - from ..syntax.util import get_symbols from ..fstrips.ops import collect_affected_symbols diff --git a/src/tarski/io/__init__.py b/src/tarski/io/__init__.py index 33225f0c..4250741b 100644 --- a/src/tarski/io/__init__.py +++ b/src/tarski/io/__init__.py @@ -1,4 +1,3 @@ - from .fstrips import FstripsReader, FstripsWriter from .utils import find_domain_filename diff --git a/src/tarski/io/_fstrips/parser/lexer.py b/src/tarski/io/_fstrips/parser/lexer.py index 500531ae..6e0def01 100644 --- a/src/tarski/io/_fstrips/parser/lexer.py +++ b/src/tarski/io/_fstrips/parser/lexer.py @@ -636,7 +636,7 @@ class fstripsLexer(Lexer): K_PRECONDITION = 90 K_EFFECT = 91 - channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ] modeNames = [ "DEFAULT_MODE" ] diff --git a/src/tarski/io/_fstrips/parser/parser.py b/src/tarski/io/_fstrips/parser/parser.py index f29a2cc3..654357fe 100644 --- a/src/tarski/io/_fstrips/parser/parser.py +++ b/src/tarski/io/_fstrips/parser/parser.py @@ -1,5 +1,4 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 -# encoding: utf-8 from antlr4 import * from io import StringIO from typing.io import TextIO @@ -1935,7 +1934,7 @@ def typename(self): self.state = 269 self._errHandler.sync(self) _la = self._input.LA(1) - if not (((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (fstripsParser.INT_T - 79)) | (1 << (fstripsParser.FLOAT_T - 79)) | (1 << (fstripsParser.OBJECT_T - 79)) | (1 << 
(fstripsParser.NUMBER_T - 79)) | (1 << (fstripsParser.NAME - 79)))) != 0)): + if not (((_la - 79) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (fstripsParser.INT_T - 79)) | (1 << (fstripsParser.FLOAT_T - 79)) | (1 << (fstripsParser.OBJECT_T - 79)) | (1 << (fstripsParser.NUMBER_T - 79)) | (1 << (fstripsParser.NAME - 79)))) != 0): break self.state = 271 @@ -3642,7 +3641,7 @@ def atomicTermFormula(self): self.state = 444 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or ((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): + while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or (((_la - 83) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): self.state = 441 self.term() self.state = 446 @@ -3970,7 +3969,7 @@ def functionTerm(self): self.state = 461 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or ((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): + while _la==fstripsParser.T__0 or _la==fstripsParser.T__15 or (((_la - 83) & ~0x3f) == 0 and ((1 << (_la - 83)) & ((1 << (fstripsParser.NAME - 83)) | (1 << (fstripsParser.VARIABLE - 83)) | (1 << (fstripsParser.NUMBER - 83)))) != 0): self.state = 458 self.term() self.state = 463 @@ -4659,7 +4658,7 @@ def builtin_binary_function(self): self.enterOuterAlt(localctx, 1) self.state = 551 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__18) | (1 << fstripsParser.T__19) | (1 << fstripsParser.T__20) | (1 << fstripsParser.T__21) | (1 << fstripsParser.T__22) | (1 << fstripsParser.T__23))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__18) | (1 << fstripsParser.T__19) | (1 << fstripsParser.T__20) | (1 << fstripsParser.T__21) | (1 << fstripsParser.T__22) | (1 << fstripsParser.T__23))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4708,7 +4707,7 @@ def builtin_unary_function(self): self.enterOuterAlt(localctx, 1) self.state = 553 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__24) | (1 << fstripsParser.T__25) | (1 << fstripsParser.T__26) | (1 << fstripsParser.T__27) | (1 << fstripsParser.T__28) | (1 << fstripsParser.T__29) | (1 << fstripsParser.T__30) | (1 << fstripsParser.T__31) | (1 << fstripsParser.T__32))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__6) | (1 << fstripsParser.T__24) | (1 << fstripsParser.T__25) | (1 << fstripsParser.T__26) | (1 << fstripsParser.T__27) | (1 << fstripsParser.T__28) | (1 << fstripsParser.T__29) | (1 << fstripsParser.T__30) | (1 << fstripsParser.T__31) | (1 << fstripsParser.T__32))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4757,7 +4756,7 @@ def builtin_binary_predicate(self): self.enterOuterAlt(localctx, 1) self.state = 555 _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__33) | (1 << fstripsParser.T__34) | (1 << fstripsParser.T__35) | (1 << fstripsParser.T__36) | (1 << 
fstripsParser.T__37))) != 0)): + if not(((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << fstripsParser.T__33) | (1 << fstripsParser.T__34) | (1 << fstripsParser.T__35) | (1 << fstripsParser.T__36) | (1 << fstripsParser.T__37))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -4817,7 +4816,7 @@ def assignOp(self): self.enterOuterAlt(localctx, 1) self.state = 557 _la = self._input.LA(1) - if not(((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (fstripsParser.K_INCREASE - 75)) | (1 << (fstripsParser.K_DECREASE - 75)) | (1 << (fstripsParser.K_SCALEUP - 75)) | (1 << (fstripsParser.K_SCALEDOWN - 75)))) != 0)): + if not(((_la - 75) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (fstripsParser.K_INCREASE - 75)) | (1 << (fstripsParser.K_DECREASE - 75)) | (1 << (fstripsParser.K_SCALEUP - 75)) | (1 << (fstripsParser.K_SCALEDOWN - 75)))) != 0): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/src/tarski/io/_fstrips/reader.py b/src/tarski/io/_fstrips/reader.py index ac46ffb1..effae36e 100644 --- a/src/tarski/io/_fstrips/reader.py +++ b/src/tarski/io/_fstrips/reader.py @@ -159,7 +159,7 @@ def visitTypeBoundsDefinition(self, ctx): typename = ctx.NAME().getText().lower() sort = self.language.get_sort(typename) if not isinstance(sort, Interval): - raise ParsingError("Attempt at bounding symbolic non-interval sort '{}'".format(sort)) + raise ParsingError(f"Attempt at bounding symbolic non-interval sort '{sort}'") # Encode the bounds and set them into the sort lower = sort.encode(ctx.NUMBER(0).getText()) @@ -221,7 +221,7 @@ def visitTermNumber(self, ctx): def _recover_variable_from_context(self, name): if self.current_binding is None: - raise ParsingError("Variable '{}' used declared outside variable binding".format(name)) + raise ParsingError(f"Variable '{name}' used declared outside variable binding") return self.current_binding.get(name) @@ -459,7 +459,7 @@ def __init__(self, component, value): self.value = value def __str__(self): - return 'in {} found undeclared variable {}'.format(self.component, repr(self.value)) + return f'in {self.component} found undeclared variable {repr(self.value)}' class ParserVariableContext: diff --git a/src/tarski/io/common.py b/src/tarski/io/common.py index bb5fc050..1372925c 100644 --- a/src/tarski/io/common.py +++ b/src/tarski/io/common.py @@ -1,9 +1,8 @@ - import os _CURRENT_DIR_ = os.path.dirname(os.path.realpath(__file__)) def load_tpl(name): - with open(os.path.join(_CURRENT_DIR_, "templates", name), 'r', encoding='utf8') as file: + with open(os.path.join(_CURRENT_DIR_, "templates", name), encoding='utf8') as file: return file.read() diff --git a/src/tarski/io/fstrips.py b/src/tarski/io/fstrips.py index 5de5ed61..b96b20a4 100644 --- a/src/tarski/io/fstrips.py +++ b/src/tarski/io/fstrips.py @@ -50,7 +50,7 @@ def read_problem(self, domain, instance): return self.problem def parse_file(self, filename, start_rule): - logging.debug('Parsing filename "{}" from grammar rule "{}"'.format(filename, start_rule)) + logging.debug(f'Parsing filename "{filename}" from grammar rule "{start_rule}"') domain_parse_tree, _ = self.parser.parse_file(filename, start_rule) self.parser.visit(domain_parse_tree) @@ -71,7 +71,7 @@ def parse_instance_string(self, instance): return self.problem def parse_string(self, string, start_rule): - logging.debug('Parsing custom string from grammar rule "{}"'.format(start_rule)) + logging.debug(f'Parsing custom string from grammar rule "{start_rule}"') parse_tree, _ = 
self.parser.parse_string(string, start_rule) logging.debug("Processing AST") return self.parser.visit(parse_tree) @@ -104,7 +104,7 @@ def print_objects(constants): elements = [] for sort in sorted(constants_by_sort.keys()): sobjects = " ".join(sorted(constants_by_sort[sort])) - elements.append("{} - {}".format(sobjects, sort)) + elements.append(f"{sobjects} - {sort}") return linebreaks(elements, indentation=2, indent_first=False) @@ -126,14 +126,14 @@ def print_init(problem): continue # Ignore intensionally defined symbols fname = signature[0] for point, value in definition.data.items(): - elements.append("(= ({} {}) {})".format(fname, print_term_ref_list(point), value)) + elements.append(f"(= ({fname} {print_term_ref_list(point)}) {value})") # e.g. (clear b1) for signature, definition in problem.init.predicate_extensions.items(): assert isinstance(definition, set) predname = signature[0] for point in definition: - elements.append("({} {})".format(predname, print_term_ref_list(point))) + elements.append(f"({predname} {print_term_ref_list(point)})") return linebreaks(elements, indentation=2, indent_first=False) @@ -149,7 +149,7 @@ def print_domain_bounds(problem): if not sort.builtin and isinstance(sort, Interval): assert lang.has_sort('Integer') if lang.Integer in ancestors(sort): - bounds.append("({} - int[{}..{}])".format(sort.name, sort.lower_bound, sort.upper_bound)) + bounds.append(f"({sort.name} - int[{sort.lower_bound}..{sort.upper_bound}])") elif lang.Real in ancestors(sort): pass # TODO @@ -157,7 +157,7 @@ def print_domain_bounds(problem): return "" inner = "\n".join(indent(b, 2) for b in bounds) - return "(:bounds\n{})".format(inner) + return f"(:bounds\n{inner})" def print_problem_constraints(problem): @@ -261,7 +261,7 @@ def get_functions(self): continue # Don't declare builtin elements domain_str = build_signature_string(fun.domain) codomain_str = tarski_to_pddl_type(fun.codomain) - res.append("({} {}) - {}".format(fun.symbol, domain_str, codomain_str)) + res.append(f"({fun.symbol} {domain_str}) - {codomain_str}") return ("\n" + _TAB * 2).join(res) def get_predicates(self): @@ -270,7 +270,7 @@ def get_predicates(self): if fun.builtin: continue # Don't declare builtin elements domain_str = build_signature_string(fun.sort) - res.append("({} {})".format(fun.symbol, domain_str)) + res.append(f"({fun.symbol} {domain_str})") return ("\n" + _TAB * 2).join(res) def get_actions(self): @@ -322,13 +322,13 @@ def print_formula(formula, indentation=0): elif isinstance(formula, Atom): return print_atom(formula) elif isinstance(formula, CompoundFormula): - return "({} {})".format(formula.connective, print_formula_list(formula.subformulas)) + return f"({formula.connective} {print_formula_list(formula.subformulas)})" elif isinstance(formula, QuantifiedFormula): vars_ = print_variable_list(formula.variables) # e.g. 
(exists (?x - object) (and (= ?x 2))) - return '({} ({}) {})'.format(formula.quantifier, vars_, print_formula(formula.formula)) - raise RuntimeError("Unexpected element type: {}".format(formula)) + return f'({formula.quantifier} ({vars_}) {print_formula(formula.formula)})' + raise RuntimeError(f"Unexpected element type: {formula}") def print_effects(effects, cost=None, indentation=0): @@ -347,19 +347,19 @@ def print_unconditional_effect(eff, indentation=0): increase = isinstance(eff, IncreaseEffect) if increase: - return indent("(increase {} {})".format(print_term(eff.lhs), print_term(eff.rhs)), indentation) + return indent(f"(increase {print_term(eff.lhs)} {print_term(eff.rhs)})", indentation) elif functional: - return indent("(assign {} {})".format(print_term(eff.lhs), print_term(eff.rhs)), indentation) + return indent(f"(assign {print_term(eff.lhs)} {print_term(eff.rhs)})", indentation) elif isinstance(eff, AddEffect): - return indent("{}".format(print_atom(eff.atom)), indentation) + return indent(f"{print_atom(eff.atom)}", indentation) elif isinstance(eff, DelEffect): - return indent("(not {})".format(print_atom(eff.atom)), indentation) + return indent(f"(not {print_atom(eff.atom)})", indentation) elif isinstance(eff, UniversalEffect): effect_str = (print_effect(eff.effects[0]) if len(eff.effects) == 1 else print_effects(eff.effects)) - return indent("(forall ({}) {})".format(print_variable_list(eff.variables), effect_str), + return indent(f"(forall ({print_variable_list(eff.variables)}) {effect_str})", indentation) - raise RuntimeError("Unexpected element type: {}".format(eff)) + raise RuntimeError(f"Unexpected element type: {eff}") def print_effect(eff, indentation=0): @@ -367,7 +367,7 @@ def print_effect(eff, indentation=0): if conditional: return indent( - "(when {} {})".format(print_formula(eff.condition), print_unconditional_effect(eff)), + f"(when {print_formula(eff.condition)} {print_unconditional_effect(eff)})", indentation) else: return print_unconditional_effect(eff, indentation) @@ -378,10 +378,10 @@ def print_term(term): if isinstance(term, Variable): return print_variable_name(term.symbol) elif isinstance(term, CompoundTerm): - return "({} {})".format(term.symbol.symbol, print_term_list(term.subterms)) + return f"({term.symbol.symbol} {print_term_list(term.subterms)})" elif isinstance(term, Constant): - return "{}".format(term.symbol) - raise RuntimeError("Unexpected element type: {}".format(term)) + return f"{term.symbol}" + raise RuntimeError(f"Unexpected element type: {term}") def print_atom(atom: Atom): diff --git a/src/tarski/io/pddl/errors.py b/src/tarski/io/pddl/errors.py index 2eecbc52..5fcb5b09 100644 --- a/src/tarski/io/pddl/errors.py +++ b/src/tarski/io/pddl/errors.py @@ -15,7 +15,7 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Semantic Error: Line {}: {}".format(self.line, self.msg) + return f"Semantic Error: Line {self.line}: {self.msg}" class ParseError(Exception): @@ -26,7 +26,7 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Parse Error: Line {}: {}".format(self.line, self.msg) + return f"Parse Error: Line {self.line}: {self.msg}" class UnsupportedFeature(Exception): @@ -37,4 +37,4 @@ def __init__(self, line, msg): self.msg = msg def __str__(self): - return "Unsupported PDDL feature: Line {}: {}".format(self.line, self.msg) + return f"Unsupported PDDL feature: Line {self.line}: {self.msg}" diff --git a/src/tarski/io/pddl/instance.py b/src/tarski/io/pddl/instance.py index 9d99afc9..1dafe30f 
100644 --- a/src/tarski/io/pddl/instance.py +++ b/src/tarski/io/pddl/instance.py @@ -368,4 +368,4 @@ def process_objective_definition(self, objective_data): type=objective_data['definition']['type'], expr=objective_data['definition']['expr']) if self.debug: - print("Objective: mode: {} type: {} expr: {}".format(self.objective.mode, self.objective.type, self.objective.expr)) + print(f"Objective: mode: {self.objective.mode} type: {self.objective.type} expr: {self.objective.expr}") diff --git a/src/tarski/io/pddl/lexer.py b/src/tarski/io/pddl/lexer.py index b4d7e0c2..24f057b2 100644 --- a/src/tarski/io/pddl/lexer.py +++ b/src/tarski/io/pddl/lexer.py @@ -236,7 +236,7 @@ def t_NAT(self, t): return t def t_error(self, t): - print("Illegal character: {} at line: {}".format(repr(t.value[0]), self._lexer.lineno)) + print(f"Illegal character: {repr(t.value[0])} at line: {self._lexer.lineno}") t.lexer.skip(1) def lineno(self): diff --git a/src/tarski/io/pddl/parser.py b/src/tarski/io/pddl/parser.py index 7f2ccc49..ea933477 100644 --- a/src/tarski/io/pddl/parser.py +++ b/src/tarski/io/pddl/parser.py @@ -68,7 +68,7 @@ def parse(self, input_data): def _print_verbose(self, p_name): if self.verbose: - print('>> Parsed `{}` ...'.format(p_name)) + print(f'>> Parsed `{p_name}` ...') def p_begin(self, p): '''begin : domain @@ -122,7 +122,7 @@ def p_domain_ref(self, p): '''domain_ref : LPAREN rwDOMAIN_REF ID RPAREN''' expected_domain = p[3] if expected_domain != self.domain_name: - msg = "Domain and problem mismatch: expected domain name is '{}', provided domain is '{}'".format(expected_domain, self.domain_name) + msg = f"Domain and problem mismatch: expected domain name is '{expected_domain}', provided domain is '{self.domain_name}'" raise SemanticError(self.lexer.lineno(), msg) def p_domain_require_def(self, p): @@ -300,14 +300,14 @@ def p_constants_def(self, p): if isinstance(entry, tuple): typename, constant_list = entry if typename not in self.instance.types: - msg = "Error parsing (:constants ) section: type '{}' was not defined".format(typename) + msg = f"Error parsing (:constants ) section: type '{typename}' was not defined" raise SemanticError(self.lexer.lineno(), msg) self.instance.process_constant_definition(entry) total_constants += len(constant_list) else: - msg = "Error processing (:constants ) section: constant '{}' has no type attached".format(entry) + msg = f"Error processing (:constants ) section: constant '{entry}' has no type attached" raise SemanticError(self.lexer.lineno(), msg) if self.debug: print("Total constants defined:", total_constants) @@ -350,7 +350,7 @@ def normalize_typed_variable_list(self, unnorm_args): if token_type == 'type': var_type = token_value if var_type not in self.instance.types: - msg = "Error parsing list of typed variables: type '{}' is not defined".format(var_type) + msg = f"Error parsing list of typed variables: type '{var_type}' is not defined" raise SemanticError(self.lexer.lineno(), msg) for t2 in unnorm_args[last_index+1:i]: var_term, var_sort = self.instance.get_variable(t2[1], var_type) @@ -649,7 +649,7 @@ def p_GD(self, p): p[0] = lor(neg(p[3]), p[4]) elif p[2] == self.lexer.symbols.rwEXISTS: if self.debug: - print('existential quantifier, scope tokens: {} formula: {}'.format(p[3], p[4])) + print(f'existential quantifier, scope tokens: {p[3]} formula: {p[4]}') vars = p[3] phi = p[4] p[0] = QuantifiedFormula(Quantifier.Exists, [entry['term'] for entry in vars], phi) @@ -722,7 +722,7 @@ def p_term(self, p): try: func_name = self.instance.get(p[2]) except 
tsk.LanguageError as e: - msg = "Error parsing term in formula, function '{}' is not declared".format(p[2]) + msg = f"Error parsing term in formula, function '{p[2]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) sub_terms = p[3] p[0] = func_name(*sub_terms) @@ -732,14 +732,14 @@ def p_term(self, p): constant_ref = self.instance.get(p[1]) p[0] = constant_ref except tsk.LanguageError as e: - msg = "Error parsing term in formula, constant '{}' is not declared".format(p[1]) + msg = f"Error parsing term in formula, constant '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) elif self.lexer.is_variable(p[1]): try: var_ref = self.var_dict[p[1]] p[0] = var_ref except KeyError as e: - msg = "Error parsing term in formula, variable '{}' is not declared in the current scope".format(p[1]) + msg = f"Error parsing term in formula, variable '{p[1]}' is not declared in the current scope" raise SemanticError(self.lexer.lineno(), msg) def p_function_term(self, p): @@ -749,7 +749,7 @@ def p_function_term(self, p): sub_terms = p[3] p[0] = func_name(*sub_terms) except tsk.LanguageError as e: - msg = "Error parsing function term, function '{}' is not declared".format(p[2]) + msg = f"Error parsing function term, function '{p[2]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) def p_list_of_expression(self, p): @@ -797,7 +797,7 @@ def p_f_exp(self, p): p[0] = constant_ref return except tsk.LanguageError as e: - msg = "Error parsing expression, constant '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, constant '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) else: # raise error @@ -833,7 +833,7 @@ def p_f_head(self, p): func_name = self.instance.get(p[1]) p[0] = func_name() except tsk.LanguageError as e: - msg = "Error parsing expression, function '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, function '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) return @@ -842,7 +842,7 @@ def p_f_head(self, p): sub_terms = p[3] p[0] = func_name(*sub_terms) except tsk.LanguageError as e: - msg = "Error parsing expression, function '{}' is not declared".format(p[1]) + msg = f"Error parsing expression, function '{p[1]}' is not declared" raise SemanticError(self.lexer.lineno(), msg) def p_binary_op(self, p): @@ -1118,7 +1118,7 @@ def p_simple_duration_constraint(self, p): raise UnsupportedFeature(self.lexer.lineno(), msg) variable = p[3] if variable != '?duration': - msg = "Error parsing duration of durative action: found variable '{}' rather than '?duration'".format(variable) + msg = f"Error parsing duration of durative action: found variable '{variable}' rather than '?duration'" raise ParseError(self.lexer.lineno(), msg) p[0] = p[4] @@ -1228,17 +1228,17 @@ def p_derived_def(self, p): try: head_pred = self.instance.predicates.get(symbol) except ValueError as e: - msg = "Error parsing derived predicate, head symbol '{}' is not declared".format(symbol) + msg = f"Error parsing derived predicate, head symbol '{symbol}' is not declared" raise SemanticError(self.lexer.lineno(), msg) for k, arg in enumerate(head_pred.domain): if self.debug: print('signature: {} provided: {}'.format(head_pred.domain[k], var_list[k]['type'])) if not head_pred.domain[k] == var_list[k]['type']: - msg = "Error parsing derived predicate, head predicate '{}' type mismatch, check definition in (:predicates ...)".format(symbol) + msg = f"Error parsing derived predicate, head predicate '{symbol}' type mismatch, check 
definition in (:predicates ...)" raise SemanticError(self.lexer.lineno(), msg) dpred_body = p[4] if self.debug: - print("Body: {} type: {}".format(dpred_body, type(dpred_body))) + print(f"Body: {dpred_body} type: {type(dpred_body)}") self.instance.process_derived_predicate_skeleton(head_pred, var_list, dpred_body) # clear up scope for entry in var_list: @@ -1260,11 +1260,11 @@ def p_object_declaration(self, p): if isinstance(entry, tuple): typename, constant_list = entry if typename not in self.instance.types: - msg = "Error parsing (:objects ) section: type '{}' was not defined".format(typename) + msg = f"Error parsing (:objects ) section: type '{typename}' was not defined" raise SemanticError(self.lexer.lineno(), msg) self.instance.process_constant_definition(entry) else: - msg = "Error processing (:objects ) section: constant '{}' has no type attached".format(entry) + msg = f"Error processing (:objects ) section: constant '{entry}' has no type attached" raise SemanticError(self.lexer.lineno(), msg) if self.debug: total_constants = 0 @@ -1308,7 +1308,7 @@ def p_init_el(self, p): constant_term = self.instance.get(p[4]) p[0] = (p[3], constant_term) except tsk.LanguageError as e: - msg = "Error processing initial state: object '{}' was not defined".format(p[4]) + msg = f"Error processing initial state: object '{p[4]}' was not defined" raise SemanticError(self.lexer.lineno(), msg) def p_literal_of_name(self, p): @@ -1333,7 +1333,7 @@ def p_atomic_formula_of_name(self, p): try: func_symbol = self.instance.get(p[2]) except tsk.LanguageError as e: - msg = "Error parsing ground atomic formula: function '{}' is not defined".format(p[2]) + msg = f"Error parsing ground atomic formula: function '{p[2]}' is not defined" raise SemanticError(self.lexer.lineno(), msg) sub_terms = p[3] p[0] = { @@ -1416,9 +1416,9 @@ def p_error(self, p): # reached End of File return if self.debug: - print('Syntax error in input! See log file: {}'.format(self.logfile)) + print(f'Syntax error in input! See log file: {self.logfile}') - print('Syntax error in input! Line: {} failed token: {} next: {}'.format(p.lineno, p, self._parser.token())) + print(f'Syntax error in input! 
Line: {p.lineno} failed token: {p} next: {self._parser.token()}') while True: tok = self._parser.token() diff --git a/src/tarski/io/rddl.py b/src/tarski/io/rddl.py index fa46cce9..96ed9359 100644 --- a/src/tarski/io/rddl.py +++ b/src/tarski/io/rddl.py @@ -219,7 +219,7 @@ def __init__(self, filename): @staticmethod def _load_rddl_model(filename): - with open(filename, 'r', encoding='utf8') as input_file: + with open(filename, encoding='utf8') as input_file: rddl = input_file.read() parser = modules.import_pyrddl_parser()() parser.build() @@ -426,12 +426,12 @@ def write_model(self, filename): reward_expr=self.get_reward(), action_precondition_list=self.get_preconditions(), state_invariant_list=self.get_state_invariants(), - domain_non_fluents='{}_non_fluents'.format(self.task.instance_name), + domain_non_fluents=f'{self.task.instance_name}_non_fluents', object_list=self.get_objects(), non_fluent_expr=self.get_non_fluent_init(), instance_name=self.task.instance_name, init_state_fluent_expr=self.get_state_fluent_init(), - non_fluents_ref='{}_non_fluents'.format(self.task.instance_name), + non_fluents_ref=f'{self.task.instance_name}_non_fluents', max_nondef_actions=self.get_max_nondef_actions(), horizon=self.get_horizon(), discount=self.get_discount() @@ -451,7 +451,7 @@ def get_types(self): if isinstance(S, Interval): self.need_constraints[S.name] = S continue - type_decl_list += ['{} : {};'.format(S.name, parent(S).name)] + type_decl_list += [f'{S.name} : {parent(S).name};'] self.need_obj_decl += [S] return '\n'.join(type_decl_list) @@ -477,7 +477,7 @@ def get_signature(fl): else: assert False if len(domain) == 0: - return '{}'.format(head) + return f'{head}' return '{}({})'.format(head, ','.join(domain)) def get_pvars(self): @@ -485,30 +485,30 @@ def get_pvars(self): # state fluents for fl, v in self.task.state_fluents: rsig = self.get_signature(fl) - pvar_decl_list += ['\t{} : {{state-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{state-fluent, {self.get_type(fl)}, default = {str(v)}}};'] for fl, level in self.task.interm_fluents: rsig = self.get_signature(fl) try: self.interm_signatures.add(fl.symbol.signature) except AttributeError: self.interm_signatures.add(fl.predicate.signature) - pvar_decl_list += ['\t{} : {{interm-fluent, {}, level = {}}};'.format(rsig, self.get_type(fl), str(level))] + pvar_decl_list += [f'\t{rsig} : {{interm-fluent, {self.get_type(fl)}, level = {str(level)}}};'] for fl, v in self.task.action_fluents: rsig = self.get_signature(fl) - pvar_decl_list += ['\t{} : {{action-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{action-fluent, {self.get_type(fl)}, default = {str(v)}}};'] for fl, v in self.task.non_fluents: rsig = self.get_signature(fl) try: self.non_fluent_signatures.add(fl.symbol.signature) except AttributeError: self.non_fluent_signatures.add(fl.predicate.signature) - pvar_decl_list += ['\t{} : {{non-fluent, {}, default = {}}};'.format(rsig, self.get_type(fl), str(v))] + pvar_decl_list += [f'\t{rsig} : {{non-fluent, {self.get_type(fl)}, default = {str(v)}}};'] return '\n'.join(pvar_decl_list) def get_cpfs(self): cpfs_decl_list = [] for lhs, rhs in self.task.cpfs: - cpfs_decl_list += ['\t{} = {};'.format(self.get_fluent(lhs, True), self.rewrite(rhs))] + cpfs_decl_list += [f'\t{self.get_fluent(lhs, True)} = {self.rewrite(rhs)};'] return '\n'.join(cpfs_decl_list) def get_reward(self): @@ -518,14 +518,14 @@ def get_preconditions(self): act_prec_list 
= [] for expr, ctype in self.task.constraints: if ctype == ConstraintType.ACTION: - act_prec_list += ['\t{};'.format(self.rewrite(expr))] + act_prec_list += [f'\t{self.rewrite(expr)};'] return '\n'.join(act_prec_list) def get_state_invariants(self): state_inv_list = [] for expr, ctype in self.task.constraints: if ctype == ConstraintType.STATE: - state_inv_list += ['\t{};'.format(self.rewrite(expr))] + state_inv_list += [f'\t{self.rewrite(expr)};'] return '\n'.join(state_inv_list) def get_objects(self): @@ -537,7 +537,7 @@ def get_objects(self): # initialize for S in self.need_obj_decl: domain_str = ','.join([str(c.symbol) for c in S.domain()]) - obj_decl_blocks += ['\t{} : {{{}}};'.format(S.name, domain_str)] + obj_decl_blocks += [f'\t{S.name} : {{{domain_str}}};'] return 'objects {{{}}};'.format('\n'.join(obj_decl_blocks)) @@ -552,7 +552,7 @@ def get_non_fluent_init(self): term_str = signature[0] else: term_str = str(self.task.L.get(signature[0])(*subterms)) - non_fluent_init_list += ['\t{} = {};'.format(term_str, value)] + non_fluent_init_list += [f'\t{term_str} = {value};'] for signature, defs in self.task.x0.predicate_extensions.items(): if signature not in self.non_fluent_signatures: continue @@ -562,7 +562,7 @@ def get_non_fluent_init(self): atom_str = signature[0] else: atom_str = str(self.task.L.get(signature[0])(*subterms)) - non_fluent_init_list += ['\t{} = true;'.format(atom_str)] + non_fluent_init_list += [f'\t{atom_str} = true;'] if len(non_fluent_init_list) == 0: return '' @@ -581,7 +581,7 @@ def get_state_fluent_init(self): term_str = signature[0] else: term_str = str(self.task.L.get(signature[0])(*subterms)) - init_list += ['\t{} = {};'.format(term_str, value)] + init_list += [f'\t{term_str} = {value};'] for signature, defs in self.task.x0.predicate_extensions.items(): if signature in self.non_fluent_signatures \ or signature in self.interm_signatures: @@ -592,7 +592,7 @@ def get_state_fluent_init(self): atom_str = signature[0] else: atom_str = str(self.task.L.get(signature[0])(*subterms)) - init_list += ['\t{} = true;'.format(atom_str)] + init_list += [f'\t{atom_str} = true;'] return '\n'.join(init_list) @@ -603,7 +603,7 @@ def rewrite(self, expr): re_st = [self.rewrite(st) for st in expr.subterms] if expr.symbol.builtin: if expr.symbol.symbol in symbol_map.keys(): - return '({} {} {})'.format(re_st[0], symbol_map[expr.symbol.symbol], re_st[1]) + return f'({re_st[0]} {symbol_map[expr.symbol.symbol]} {re_st[1]})' st_str = '' if expr.symbol.builtin: if expr.symbol.symbol in function_map.keys(): @@ -614,19 +614,19 @@ def rewrite(self, expr): st_str = '({})'.format(','.join(re_st)) else: st_str = '[{}]'.format(','.join(re_st)) - return '{}{}'.format(function_map[expr.symbol.symbol], st_str) + return f'{function_map[expr.symbol.symbol]}{st_str}' if len(re_st) > 0: st_str = '({})'.format(','.join(re_st)) - return '{}{}'.format(expr.symbol.signature[0], st_str) + return f'{expr.symbol.signature[0]}{st_str}' elif isinstance(expr, Atom): re_st = [self.rewrite(st) for st in expr.subterms] if expr.predicate.builtin: if expr.predicate.symbol in symbol_map.keys(): - return '({} {} {})'.format(re_st[0], symbol_map[expr.predicate.symbol], re_st[1]) + return f'({re_st[0]} {symbol_map[expr.predicate.symbol]} {re_st[1]})' st_str = '' if len(re_st) > 0: st_str = '({})'.format(','.join(re_st)) - return '{}{}'.format(expr.predicate.signature[0], st_str) + return f'{expr.predicate.signature[0]}{st_str}' elif isinstance(expr, Variable): # remove ? 
just in case return '?{}'.format(expr.symbol.replace('?', '')) @@ -636,7 +636,7 @@ def rewrite(self, expr): cond = self.rewrite(expr.condition) expr1 = self.rewrite(expr.subterms[0]) expr2 = self.rewrite(expr.subterms[1]) - return 'if ({}) then ({}) else ({})'.format(cond, expr1, expr2) + return f'if ({cond}) then ({expr1}) else ({expr2})' elif isinstance(expr, Tautology): return 'true' elif isinstance(expr, Contradiction): @@ -645,16 +645,16 @@ def rewrite(self, expr): re_sf = [self.rewrite(st) for st in expr.subformulas] re_sym = symbol_map[expr.connective] if len(re_sf) == 1: - return '{}{}'.format(re_sym, re_sf) - return '({} {} {})'.format(re_sf[0], re_sym, re_sf[1]) + return f'{re_sym}{re_sf}' + return f'({re_sf[0]} {re_sym} {re_sf[1]})' elif isinstance(expr, QuantifiedFormula): re_f = self.rewrite(expr.formula) - re_vars = ['?{} : {}'.format(x.symbol, x.sort.name) for x in expr.variables] + re_vars = [f'?{x.symbol} : {x.sort.name}' for x in expr.variables] re_sym = symbol_map[expr.quantifier] return '{}_{{{}}} ({})'.format(re_sym, ','.join(re_vars), re_f) elif isinstance(expr, AggregateCompoundTerm): re_expr = self.rewrite(expr.subterm) - re_vars = ['?{} : {}'.format(x.symbol, x.sort.name) for x in expr.bound_vars] + re_vars = [f'?{x.symbol} : {x.sort.name}' for x in expr.bound_vars] if expr.symbol == BFS.ADD: re_sym = 'sum' elif expr.symbol == BFS.MUL: @@ -681,7 +681,7 @@ def get_fluent(fl, next_state=False): prima = '' if next_state: prima = "'" - return "{}{}{}".format(head, prima, subterms_str) + return f"{head}{prima}{subterms_str}" def get_max_nondef_actions(self): return str(self.task.parameters.max_nondef_actions) diff --git a/src/tarski/ndl/temporal.py b/src/tarski/ndl/temporal.py index 7a9a90ac..bb4dcf70 100644 --- a/src/tarski/ndl/temporal.py +++ b/src/tarski/ndl/temporal.py @@ -30,10 +30,10 @@ def __init__(self, **kwargs): self.td = kwargs['td'] self.r = kwargs['r'] if not isinstance(self.r, CompoundTerm): - raise NDLSyntaxError("NDL Syntactic Error: resource lock needs to be a term (given: {})".format(self.r)) + raise NDLSyntaxError(f"NDL Syntactic Error: resource lock needs to be a term (given: {self.r})") def __str__(self): - return "LOCK {} AFTER {} FOR {}".format(self.r, self.ts, self.td) + return f"LOCK {self.r} AFTER {self.ts} FOR {self.td}" class ResourceLevel: @@ -43,17 +43,17 @@ def __init__(self, **kwargs): self.td = kwargs['td'] self.r = kwargs['r'] if not isinstance(self.r, CompoundTerm): - raise NDLSyntaxError("NDL Syntactic Error: resource lock must refer to term (given: {})".format(self.r)) + raise NDLSyntaxError(f"NDL Syntactic Error: resource lock must refer to term (given: {self.r})") self.n = kwargs['n'] if not isinstance(self.n, Constant): - raise NDLSyntaxError("NDL Syntactic Error: resource level must be a constant (given: {}".format(self.n)) + raise NDLSyntaxError(f"NDL Syntactic Error: resource level must be a constant (given: {self.n}") if self.n.sort != self.r.sort: raise NDLSyntaxError( "NDL Type Mismatch: resource and level have different sorts (resource is: {}, level is: {}".format( self.r.sort, self.n.sort)) def __str__(self): - return "LOCK {} AFTER {} FOR {}".format(self.r, self.ts, self.td) + return f"LOCK {self.r} AFTER {self.ts} FOR {self.td}" class SetLiteralEffect: @@ -66,7 +66,7 @@ def __init__(self, lit, value): self.value = value def __str__(self): - return "SET({}, {})".format(self.lit, self.value) + return f"SET({self.lit}, {self.value})" class AssignValueEffect: @@ -79,7 +79,7 @@ def __init__(self, atom, value): self.value = 
value def __str__(self): - return "ASSIGN({}, {})".format(self.atom, self.value) + return f"ASSIGN({self.atom}, {self.value})" class UniversalEffect: @@ -92,7 +92,7 @@ def __init__(self, variable, effect): self.eff = effect def __str__(self): - return "FORALL({}, {})".format(self.var, self.effect) + return f"FORALL({self.var}, {self.effect})" class ConditionalEffect: @@ -106,7 +106,7 @@ def __init__(self, cond, then_eff, else_eff): self.else_eff = else_eff def __str__(self): - return "IF ({}) \nTHEN {}\n ELSE {}".format(self.condition, self.then_eff, self.else_eff) + return f"IF ({self.condition}) \nTHEN {self.then_eff}\n ELSE {self.else_eff}" class TimedEffect: @@ -119,7 +119,7 @@ def __init__(self, delay, eff): self.eff = eff def __str__(self): - return "AFTER {} APPLY {}".format(self.delay, self.eff) + return f"AFTER {self.delay} APPLY {self.eff}" class UnionExpression: @@ -190,13 +190,13 @@ def __init__(self, **kwargs): self.levels += [req] self.max_eff_time = max(self.max_eff_time, req.eff.td) else: - raise NDLSyntaxError("NDL syntax error: '{}' is not a resource lock or level request".format(req)) + raise NDLSyntaxError(f"NDL syntax error: '{req}' is not a resource lock or level request") # effects self.untimed_effects = [] self.timed_effects = [] for eff in kwargs['timed_effects']: if not isinstance(eff, TimedEffect): - raise NDLSyntaxError("NDL Syntax error: eff '{}' must be timed".format(eff)) + raise NDLSyntaxError(f"NDL Syntax error: eff '{eff}' must be timed") self.timed_effects += [eff] self.max_eff_time = max(self.max_eff_time, eff.delay) wrapped_effect = eff.eff @@ -205,7 +205,7 @@ def __init__(self, **kwargs): elif isinstance(wrapped_effect, SetLiteralEffect): self.effect_times[(symref(wrapped_effect.lit), wrapped_effect.value)] = eff.delay else: - raise NotImplementedError("Effects of type {} cannot be handled yet".format(type(wrapped_effect))) + raise NotImplementedError(f"Effects of type {type(wrapped_effect)} cannot be handled yet") for elem in kwargs['untimed_effects']: self.untimed_effects += [(0, elem)] diff --git a/src/tarski/rddl/task.py b/src/tarski/rddl/task.py index 50ce544c..fcc4375d 100644 --- a/src/tarski/rddl/task.py +++ b/src/tarski/rddl/task.py @@ -1,4 +1,3 @@ - from ..fol import FirstOrderLanguage from ..io import rddl from ..model import Model diff --git a/src/tarski/reachability/__init__.py b/src/tarski/reachability/__init__.py index 9b9907a8..96b5fd27 100644 --- a/src/tarski/reachability/__init__.py +++ b/src/tarski/reachability/__init__.py @@ -1,4 +1,3 @@ - from .asp import create_reachability_lp from .clingo_wrapper import run_clingo, parse_model diff --git a/src/tarski/reachability/asp.py b/src/tarski/reachability/asp.py index 03d1871c..ecdd379b 100644 --- a/src/tarski/reachability/asp.py +++ b/src/tarski/reachability/asp.py @@ -45,7 +45,7 @@ def __init__(self, problem: Problem, lp, include_variable_inequalities=False, in def gen_aux_atom(self, args=None): """ Return a new auxiliary atom with the given arguments """ self.aux_atom_count += 1 - return self.lp_atom("__f{}".format(self.aux_atom_count), args) + return self.lp_atom(f"__f{self.aux_atom_count}", args) def create(self): problem, lang, lp = self.problem, self.problem.language, self.lp @@ -127,7 +127,7 @@ def process_action(self, action, lang, lp): def process_action_cost(self, action, action_atom, parameters_types, lp): """ Process the increase-total-cost effect of the given action. This results in a LP atom of the form cost(action(X), 7) :- block(X). 
""" - used_varnames = set(make_variable_name(v.symbol) for v in action.parameters) + used_varnames = {make_variable_name(v.symbol) for v in action.parameters} if action.cost is None: lp.rule(f'cost({action_atom}, 1)', parameters_types) elif isinstance(action.cost, AdditiveActionCost): @@ -191,7 +191,7 @@ def process_formula(self, f: Formula): return [negate_lp_atom(processed)] else: - raise RuntimeError('Unexpected connective "{}" within CompoundFormula "{}"'.format(f.connective, f)) + raise RuntimeError(f'Unexpected connective "{f.connective}" within CompoundFormula "{f}"') elif isinstance(f, QuantifiedFormula): if f.quantifier == Quantifier.Exists: @@ -208,7 +208,7 @@ def process_formula(self, f: Formula): assert f.quantifier == Quantifier.Forall raise RuntimeError('Formula should be forall-free, revise source code') - raise RuntimeError('Unexpected formula "{}" with type "{}"'.format(f, type(f))) + raise RuntimeError(f'Unexpected formula "{f}" with type "{type(f)}"') @staticmethod def process_term(t: Term): @@ -220,7 +220,7 @@ def process_term(t: Term): elif isinstance(t, Constant): return str(t.symbol) - raise RuntimeError('Unexpected term "{}" with type "{}"'.format(t, type(t))) + raise RuntimeError(f'Unexpected term "{t}" with type "{type(t)}"') def process_effect(self, lang, eff, action_name): """ Process a given effect and return the corresponding LP rule (a pair with head and body). For instance a @@ -301,9 +301,9 @@ def __init__(self, symbol: str, args=None, infix=False): def __str__(self): """ Return a string of the form 'symbol(arg1, ..., argn)', or 'symbol()', if args is empty """ if self.infix: - return "{} {} {}".format(self.args[0], self.symbol, self.args[1]) + return f"{self.args[0]} {self.symbol} {self.args[1]}" arglist = ", ".join(str(arg) for arg in _ensure_list(self.args)) - return "{}({})".format(self.symbol, arglist) + return f"{self.symbol}({arglist})" __repr__ = __str__ @@ -400,7 +400,7 @@ def rule(self, head, body=None): def _print_rule(head, body): assert body is None or isinstance(body, (list, tuple)) - return "{}.".format(head) if body is None else "{} :- {}.".format(head, _print_body(body)) + return f"{head}." if body is None else f"{head} :- {_print_body(body)}." 
def _print_body(body): @@ -415,7 +415,7 @@ def sanitize(name: str): def _var(i=0): """ Return a distinct variable name for each given value of i """ alphabet = "XYZABCDEFGHIJKLMNOPQRSTUVW" - return alphabet[i] if i < len(alphabet) else "X{}".format(i) + return alphabet[i] if i < len(alphabet) else f"X{i}" def generate_varname(avoid=None): diff --git a/src/tarski/reachability/clingo_wrapper.py b/src/tarski/reachability/clingo_wrapper.py index f19277cb..ec99c7bd 100644 --- a/src/tarski/reachability/clingo_wrapper.py +++ b/src/tarski/reachability/clingo_wrapper.py @@ -24,7 +24,7 @@ def get_gringo_command(): else: gringo = shutil.which("gringo") command = [gringo] if gringo else None - logging.debug('Using gringo binary found in "{}"'.format(gringo)) + logging.debug(f'Using gringo binary found in "{gringo}"') return command @@ -51,7 +51,7 @@ def run_clingo(lp): return model_filename, theory_filename if os.path.isfile(stderr.name): - with open(stderr.name, 'r', encoding='utf8') as file: + with open(stderr.name, encoding='utf8') as file: errlog = file.read() if 'std::bad_alloc' in errlog: @@ -64,7 +64,7 @@ def run_clingo(lp): def parse_model(*, filename=None, content=None, symbol_mapping): if filename and not content: - with open(filename, "r", encoding='utf8') as f: + with open(filename, encoding='utf8') as f: return _parse_model(f, symbol_mapping) elif content and not filename: return _parse_model(content.splitlines(), symbol_mapping) diff --git a/src/tarski/sas/action.py b/src/tarski/sas/action.py index 65be13a7..ad575888 100644 --- a/src/tarski/sas/action.py +++ b/src/tarski/sas/action.py @@ -53,7 +53,7 @@ def __init__(self, **kwargs): self._effects = kwargs.get('effects', []) for eff in self._effects: if not isinstance(eff, Effect): - raise RuntimeError("Action effect has invalid type: got: {} expected: Effect".format(type(eff))) + raise RuntimeError(f"Action effect has invalid type: got: {type(eff)} expected: Effect") @property def name(self): diff --git a/src/tarski/search/__init__.py b/src/tarski/search/__init__.py index 7181fb4e..99939e59 100644 --- a/src/tarski/search/__init__.py +++ b/src/tarski/search/__init__.py @@ -1,3 +1,2 @@ - from .model import SearchModel, GroundForwardSearchModel from .blind import BreadthFirstSearch diff --git a/src/tarski/search/model.py b/src/tarski/search/model.py index 6ad2e840..ded1cbf6 100644 --- a/src/tarski/search/model.py +++ b/src/tarski/search/model.py @@ -1,4 +1,3 @@ - from .operations import is_applicable, progress from ..evaluators.simple import evaluate diff --git a/src/tarski/syntax/__init__.py b/src/tarski/syntax/__init__.py index 2db60cfc..668e8731 100644 --- a/src/tarski/syntax/__init__.py +++ b/src/tarski/syntax/__init__.py @@ -1,4 +1,3 @@ - from .function import Function from .predicate import Predicate from .sorts import Sort, Interval, inclusion_closure diff --git a/src/tarski/syntax/algebra/matrix.py b/src/tarski/syntax/algebra/matrix.py index bd8ee68f..a71d383e 100644 --- a/src/tarski/syntax/algebra/matrix.py +++ b/src/tarski/syntax/algebra/matrix.py @@ -43,7 +43,7 @@ def __getitem__(self, arg): return self.matrix[i, j] def __str__(self): - return '{}'.format(self.matrix) + return f'{self.matrix}' __repr__ = __str__ diff --git a/src/tarski/syntax/arithmetic/__init__.py b/src/tarski/syntax/arithmetic/__init__.py index 3f95d88e..d7fcf967 100644 --- a/src/tarski/syntax/arithmetic/__init__.py +++ b/src/tarski/syntax/arithmetic/__init__.py @@ -218,4 +218,4 @@ def simplify(expr: Term) -> Term: expr.subterms = (simplify(expr.subterms[0]), 
simplify(expr.subterms[1])) return expr - raise NotImplementedError("Can't handle expression {} yet".format(expr)) + raise NotImplementedError(f"Can't handle expression {expr} yet") diff --git a/src/tarski/syntax/arithmetic/random.py b/src/tarski/syntax/arithmetic/random.py index 6b508dae..bb67ad35 100644 --- a/src/tarski/syntax/arithmetic/random.py +++ b/src/tarski/syntax/arithmetic/random.py @@ -1,4 +1,3 @@ - from ..builtins import BuiltinFunctionSymbol as bfs from ... import modules diff --git a/src/tarski/syntax/formulas.py b/src/tarski/syntax/formulas.py index df0d5f31..abb59cbb 100644 --- a/src/tarski/syntax/formulas.py +++ b/src/tarski/syntax/formulas.py @@ -102,7 +102,7 @@ def __init__(self, connective, subformulas): def _check_well_formed(self): if any(not isinstance(f, Formula) for f in self.subformulas): - raise err.LanguageError("Wrong argument types for compound formula: '{}' ".format(self.subformulas)) + raise err.LanguageError(f"Wrong argument types for compound formula: '{self.subformulas}' ") if self.connective == Connective.Not: if len(self.subformulas) != 1: @@ -113,10 +113,10 @@ def _check_well_formed(self): def __str__(self): if self.connective == Connective.Not: assert len(self.subformulas) == 1 - return "({} {})".format(self.connective, str(self.subformulas[0])) + return f"({self.connective} {str(self.subformulas[0])})" - inner = " {} ".format(self.connective).join(str(f) for f in self.subformulas) - return "({})".format(inner) + inner = f" {self.connective} ".join(str(f) for f in self.subformulas) + return f"({inner})" __repr__ = __str__ @@ -148,7 +148,7 @@ def _check_well_formed(self): def __str__(self): vars_ = ', '.join(str(x) for x in self.variables) - return '{} {} : ({})'.format(self.quantifier, vars_, self.formula) + return f'{self.quantifier} {vars_} : ({self.formula})' __repr__ = __str__ @@ -244,10 +244,10 @@ def quantified(quantifier, *args): variables, formula = args[:-1], args[-1] if not isinstance(formula, Formula): - raise err.LanguageError('Illformed arguments for quantified formula: {}'.format(args)) + raise err.LanguageError(f'Illformed arguments for quantified formula: {args}') if not all(isinstance(x, Variable) for x in variables): - raise err.LanguageError('Illformed arguments for quantified formula: {}'.format(args)) + raise err.LanguageError(f'Illformed arguments for quantified formula: {args}') return QuantifiedFormula(quantifier, variables, args[-1]) @@ -308,7 +308,7 @@ def _check_well_formed(self): head = self.predicate if not isinstance(head, Predicate): - raise err.LanguageError("Incorrect atom head: '{}' ".format(head)) + raise err.LanguageError(f"Incorrect atom head: '{head}' ") # Check arities match if len(self.subterms) != self.predicate.arity: @@ -319,7 +319,7 @@ def _check_well_formed(self): # Check arguments are all terms of the appropriate type and matching language for arg, expected_sort in zip(self.subterms, head.sort): if not isinstance(arg, Term): - raise err.LanguageError("Wrong argument for atomic formula: '{}' ".format(arg)) + raise err.LanguageError(f"Wrong argument for atomic formula: '{arg}' ") if arg.language != language: raise err.LanguageMismatch(arg, arg.language, language) diff --git a/src/tarski/syntax/function.py b/src/tarski/syntax/function.py index 4159f38d..627e0df2 100644 --- a/src/tarski/syntax/function.py +++ b/src/tarski/syntax/function.py @@ -1,4 +1,3 @@ - from ..errors import LanguageError, LanguageMismatch from .sorts import Sort diff --git a/src/tarski/syntax/predicate.py b/src/tarski/syntax/predicate.py 
index a3a92f2d..72c17a13 100644 --- a/src/tarski/syntax/predicate.py +++ b/src/tarski/syntax/predicate.py @@ -1,4 +1,3 @@ - from ..errors import LanguageError, LanguageMismatch from .sorts import Sort diff --git a/src/tarski/syntax/sorts.py b/src/tarski/syntax/sorts.py index 28877bd9..2f08275a 100644 --- a/src/tarski/syntax/sorts.py +++ b/src/tarski/syntax/sorts.py @@ -16,7 +16,7 @@ def __init__(self, name, language, builtin=False): self.builtin = builtin def __str__(self): - return 'Sort({})'.format(self.name) + return f'Sort({self.name})' __repr__ = __str__ @@ -111,7 +111,7 @@ def cast(self, x): # pass y = self.encode(x) # can raise ValueError if not self.is_within_bounds(y): - raise ValueError("Cast: Symbol '{}' (encoded '{}') outside of defined interval bounds".format(x, y)) + raise ValueError(f"Cast: Symbol '{x}' (encoded '{y}') outside of defined interval bounds") return y def to_constant(self, x): @@ -238,8 +238,7 @@ def compute_signature_bindings(signature): """ Return an exhaustive list of all possible bindings compatible with the given signature, i.e. list of sorts. """ domains = [s.domain() for s in signature] - for binding in itertools.product(*domains): - yield binding + yield from itertools.product(*domains) def compute_direct_sort_map(lang): diff --git a/src/tarski/syntax/symrefs.py b/src/tarski/syntax/symrefs.py index 7f6f814d..dacc41f3 100644 --- a/src/tarski/syntax/symrefs.py +++ b/src/tarski/syntax/symrefs.py @@ -1,4 +1,3 @@ - from .formulas import Formula from .terms import Term @@ -29,6 +28,6 @@ def __eq__(self, other): return self.__class__ is other.__class__ and self.expr.is_syntactically_equal(other.expr) def __str__(self): - return "symref[{}]".format(self.expr) + return f"symref[{self.expr}]" __repr__ = __str__ diff --git a/src/tarski/syntax/temporal/ltl.py b/src/tarski/syntax/temporal/ltl.py index 5cc5a546..daa9b5b5 100644 --- a/src/tarski/syntax/temporal/ltl.py +++ b/src/tarski/syntax/temporal/ltl.py @@ -1,4 +1,3 @@ - from enum import Enum from ... 
import errors as err from ..formulas import Formula, Connective, CompoundFormula, lor @@ -18,23 +17,23 @@ def __init(self, conn, sub): def _check_well_formed(self): if any(not isinstance(f, Formula) for f in self.subformulas): - raise err.LanguageError("Wrong argument types for compound formula: '{}' ".format(self.subformulas)) + raise err.LanguageError(f"Wrong argument types for compound formula: '{self.subformulas}' ") if self.connective == Connective.Not or \ self.connective in (TemporalConnective.X, TemporalConnective.F, TemporalConnective.G): if len(self.subformulas) != 1: - raise err.LanguageError("{} admits only one subformula".format(str(self.connective))) + raise err.LanguageError(f"{str(self.connective)} admits only one subformula") elif len(self.subformulas) < 2: - raise err.LanguageError("{} requires at least two subformulas".format(str(self.connective))) + raise err.LanguageError(f"{str(self.connective)} requires at least two subformulas") def __str__(self): if self.connective == Connective.Not or \ self.connective in (TemporalConnective.X, TemporalConnective.F, TemporalConnective.G): assert len(self.subformulas) == 1 - return "{} ({})".format(self.connective, str(self.subformulas[0])) + return f"{self.connective} ({str(self.subformulas[0])})" - inner = " {} ".format(self.connective).join(str(f) for f in self.subformulas) - return "({})".format(inner) + inner = f" {self.connective} ".join(str(f) for f in self.subformulas) + return f"({inner})" def X(arg): diff --git a/src/tarski/syntax/terms.py b/src/tarski/syntax/terms.py index 85d15b0c..ce435a54 100644 --- a/src/tarski/syntax/terms.py +++ b/src/tarski/syntax/terms.py @@ -152,7 +152,7 @@ def __str__(self): return str(self.symbol) def __repr__(self): - return '{} ({})'.format(self.symbol, self.sort.name) + return f'{self.symbol} ({self.sort.name})' def hash(self): return hash((self.symbol, self.sort.name)) @@ -333,7 +333,7 @@ def __str__(self): return str(self.name) def __repr__(self): - return '{} ({})'.format(self.name, self.sort.name) + return f'{self.name} ({self.sort.name})' def hash(self): return hash(self.signature) diff --git a/src/tarski/syntax/transform/__init__.py b/src/tarski/syntax/transform/__init__.py index 1bd491ca..3242df1c 100644 --- a/src/tarski/syntax/transform/__init__.py +++ b/src/tarski/syntax/transform/__init__.py @@ -1,4 +1,3 @@ - from .nnf import NNFTransformation, to_negation_normal_form from .cnf import CNFTransformation, to_conjunctive_normal_form from .prenex import PrenexTransformation, to_prenex_negation_normal_form diff --git a/src/tarski/syntax/transform/prenex.py b/src/tarski/syntax/transform/prenex.py index 37b6b2e5..807f9736 100644 --- a/src/tarski/syntax/transform/prenex.py +++ b/src/tarski/syntax/transform/prenex.py @@ -28,7 +28,7 @@ def _merge_quantified_subformulas(self, lhs, rhs, renaming=True): new_variables[key_y] = y else: if renaming: - y2 = self.L.variable("{}'".format(y.symbol), y.sort) + y2 = self.L.variable(f"{y.symbol}'", y.sort) subst[y] = y2 new_variables[(y2.symbol, y2.sort.name)] = y2 if len(subst) > 0: @@ -49,7 +49,7 @@ def _nest_quantifiers(self, out_q, out_vars, out_phi, inner_q, inner_vars, conn, if key_y not in in_vars_dict: new_out_vars.append(y) else: - y2 = self.L.variable("{}'".format(y.symbol), y.sort) + y2 = self.L.variable(f"{y.symbol}'", y.sort) subst[symref(y)] = y2 new_out_vars.append(y2) if len(subst) > 0: diff --git a/src/tarski/syntax/transform/simplifications.py b/src/tarski/syntax/transform/simplifications.py index 88280dcf..bd542627 100644 --- 
a/src/tarski/syntax/transform/simplifications.py +++ b/src/tarski/syntax/transform/simplifications.py @@ -1,4 +1,3 @@ - from ...syntax import Atom, CompoundFormula, Connective, Constant, CompoundTerm from ...syntax.builtins import BuiltinPredicateSymbol diff --git a/src/tarski/syntax/transform/substitutions.py b/src/tarski/syntax/transform/substitutions.py index 39bdcec4..c39b5264 100644 --- a/src/tarski/syntax/transform/substitutions.py +++ b/src/tarski/syntax/transform/substitutions.py @@ -1,4 +1,3 @@ - import itertools from typing import List diff --git a/src/tarski/syntax/util.py b/src/tarski/syntax/util.py index af886c80..551c2f66 100644 --- a/src/tarski/syntax/util.py +++ b/src/tarski/syntax/util.py @@ -1,4 +1,3 @@ - import itertools diff --git a/src/tarski/syntax/visitors.py b/src/tarski/syntax/visitors.py index e35cb02b..da965b92 100644 --- a/src/tarski/syntax/visitors.py +++ b/src/tarski/syntax/visitors.py @@ -1,4 +1,3 @@ - from tarski.syntax import symref, QuantifiedFormula, CompoundTerm, Variable, CompoundFormula, Atom from tarski.syntax.formulas import is_eq_atom diff --git a/src/tarski/util.py b/src/tarski/util.py index d3642548..293b1810 100644 --- a/src/tarski/util.py +++ b/src/tarski/util.py @@ -29,7 +29,7 @@ def dump(self): return [str(o) for o in self.data.keys()] def __str__(self): - return ','.join('{}: {}'.format(idx, o) for o, idx in self.data.items()) + return ','.join(f'{idx}: {o}' for o, idx in self.data.items()) __repr__ = __str__ diff --git a/src/tarski/utils/__init__.py b/src/tarski/utils/__init__.py index fe120bde..73e22db9 100644 --- a/src/tarski/utils/__init__.py +++ b/src/tarski/utils/__init__.py @@ -1,2 +1 @@ - from .helpers import parse_model diff --git a/src/tarski/utils/algorithms.py b/src/tarski/utils/algorithms.py index 3942237a..7183ac14 100644 --- a/src/tarski/utils/algorithms.py +++ b/src/tarski/utils/algorithms.py @@ -5,7 +5,7 @@ def transitive_closure(elements): closure = set(elements) while True: - closure_until_now = closure | set((x, w) for x, y in closure for q, w in closure if q == y) + closure_until_now = closure | {(x, w) for x, y in closure for q, w in closure if q == y} if len(closure_until_now) == len(closure): break diff --git a/src/tarski/utils/command.py b/src/tarski/utils/command.py index 5956535a..8a3d5a60 100644 --- a/src/tarski/utils/command.py +++ b/src/tarski/utils/command.py @@ -42,9 +42,9 @@ def execute(command, **kwargs): msg = 'Executing "{}" on directory "{}"'.format(' '.join(command), cwd) if stdout: - msg += '. Standard output redirected to "{}"'.format(stdout.name) + msg += f'. Standard output redirected to "{stdout.name}"' if stderr: - msg += '. Standard error redirected to "{}"'.format(stderr.name) + msg += f'. 
Standard error redirected to "{stderr.name}"' logging.debug(msg) retcode = subprocess.call(command, cwd=cwd, stdout=stdout, stderr=stderr) diff --git a/src/tarski/utils/hashing.py b/src/tarski/utils/hashing.py index 223c85eb..af3c65be 100644 --- a/src/tarski/utils/hashing.py +++ b/src/tarski/utils/hashing.py @@ -1,4 +1,3 @@ - import hashlib import sys diff --git a/src/tarski/utils/resources.py b/src/tarski/utils/resources.py index d205eac0..a9867eef 100644 --- a/src/tarski/utils/resources.py +++ b/src/tarski/utils/resources.py @@ -39,7 +39,7 @@ def __str__(self): current_in_mb = current / (1024 * 1024) rss_in_mb = (current - self.start_mem) / (1024 * 1024) - return "[%.2fs CPU, %.2fs wall-clock, diff: %.2fMB, curr: %.2fMB]" % ( + return "[{:.2f}s CPU, {:.2f}s wall-clock, diff: {:.2f}MB, curr: {:.2f}MB]".format( self._clock() - self.start_clock, time.time() - self.start_time, rss_in_mb, current_in_mb) diff --git a/src/tarski/utils/serialization.py b/src/tarski/utils/serialization.py index 4fef28db..818f7d3a 100644 --- a/src/tarski/utils/serialization.py +++ b/src/tarski/utils/serialization.py @@ -1,5 +1,3 @@ - - def serialize_atom(atom): """ Return a comma-separated serialization of a given atom, e.g. from atom "on(a,b)", it will return "on,a,b". """ if not atom.subterms: diff --git a/src/tarski/version.py b/src/tarski/version.py index 4720c1b3..6d389260 100644 --- a/src/tarski/version.py +++ b/src/tarski/version.py @@ -1,3 +1,2 @@ - __version_info__ = (0, 7, 0) __version__ = '.'.join(map(str, __version_info__)) From e591cd1fd80dee417ef0882133e459002a83d303 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 11:15:33 +0100 Subject: [PATCH 04/14] Upgrade test code to py3.6 --- tests/common/benchmarks.py | 2 -- tests/common/blocksworld.py | 4 ++-- tests/common/gridworld.py | 1 - tests/common/gripper.py | 3 +-- tests/common/numeric.py | 4 ++-- tests/common/simple.py | 1 - tests/common/spider.py | 1 - tests/fol/test_interpretations.py | 1 - tests/fol/test_syntactic_analysis.py | 1 - tests/fol/test_syntax.py | 1 - tests/fol/test_variable_bindings.py | 2 -- tests/fstrips/hybrid/tasks.py | 4 ++-- tests/fstrips/hybrid/test_differential.py | 4 ++-- tests/fstrips/hybrid/test_reactions.py | 2 +- tests/fstrips/test_problem_grounding.py | 2 +- tests/fstrips/test_representation.py | 2 +- tests/fstrips/test_simplify.py | 6 +++--- tests/io/test_fstrips_parsing.py | 11 +++++------ tests/io/test_fstrips_writer.py | 4 ++-- tests/io/test_pddl_parsing.py | 2 +- tests/ndl/test_temporal.py | 6 +++--- tests/reachability/test_reachability_lp.py | 3 +-- tests/sas/test_action.py | 12 ++++++------ tests/search/test_search_models.py | 2 +- tests/test_utils.py | 1 - tests/transforms/test_syntax_transformations.py | 10 +++++----- 26 files changed, 39 insertions(+), 53 deletions(-) diff --git a/tests/common/benchmarks.py b/tests/common/benchmarks.py index 2d1a9e92..6e50e008 100644 --- a/tests/common/benchmarks.py +++ b/tests/common/benchmarks.py @@ -1,5 +1,3 @@ - - def get_lenient_benchmarks(): """ Return a list of IPC domains that require non-strict PDDL parsing because of missing requirement flags or similar minor bugs, which would result in a parsing error. 
""" diff --git a/tests/common/blocksworld.py b/tests/common/blocksworld.py index b5341fda..8036b2ae 100644 --- a/tests/common/blocksworld.py +++ b/tests/common/blocksworld.py @@ -17,7 +17,7 @@ def generate_bw_loc_and_clear(num_blocks): # Table and blocks lang.constant('table', lang.Object) lang.constant('hand', lang.Object) - _ = [lang.constant('b{}'.format(k), lang.Object) for k in range(1, num_blocks + 1)] + _ = [lang.constant(f'b{k}', lang.Object) for k in range(1, num_blocks + 1)] return lang @@ -27,7 +27,7 @@ def create_4blocks_task(): loc = bw.get_function('loc') clear = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) table = bw.get_constant('table') hand = bw.get_constant('hand') diff --git a/tests/common/gridworld.py b/tests/common/gridworld.py index b7933f0f..9d79b205 100644 --- a/tests/common/gridworld.py +++ b/tests/common/gridworld.py @@ -1,4 +1,3 @@ - from tarski.fstrips import create_fstrips_problem, language from tarski.theories import Theory from tarski.syntax import Tautology, land diff --git a/tests/common/gripper.py b/tests/common/gripper.py index 5a687d9c..e81853b7 100644 --- a/tests/common/gripper.py +++ b/tests/common/gripper.py @@ -1,4 +1,3 @@ - import tarski.model from tarski import fstrips as fs from tarski.syntax import land @@ -46,7 +45,7 @@ def create_sample_problem(): problem.init = init problem.goal = land(at(ball1, roomb), at(ball2, roomb), at(ball3, roomb), at(ball4, roomb)) - from_, to, o, r, g = [lang.variable(x, lang.Object) for x in ["from", "to", "o", "r", "g"]] + from_, to, o, r, g = (lang.variable(x, lang.Object) for x in ["from", "to", "o", "r", "g"]) problem.action("move", [from_, to], precondition=land(from_ != to, room(from_), room(to), at_robby(from_), flat=True), diff --git a/tests/common/numeric.py b/tests/common/numeric.py index 0c7a6449..5b38393b 100644 --- a/tests/common/numeric.py +++ b/tests/common/numeric.py @@ -17,7 +17,7 @@ def generate_numeric_instance(): # Particles for k in (1, 2, 3, 4): - lang.constant('p{}'.format(k), particle) + lang.constant(f'p{k}', particle) return lang @@ -33,7 +33,7 @@ def generate_billiards_instance(): # The stuff lang.balls = [] for k in (1, 2): - bk = lang.constant('ball_{}'.format(k), ball) + bk = lang.constant(f'ball_{k}', ball) lang.balls.append(bk) lang.dimensions = [] diff --git a/tests/common/simple.py b/tests/common/simple.py index 9f561f7b..1a5330c0 100644 --- a/tests/common/simple.py +++ b/tests/common/simple.py @@ -1,4 +1,3 @@ - import tarski.model from tarski import fstrips as fs from tarski.syntax import land, neg diff --git a/tests/common/spider.py b/tests/common/spider.py index c5f9c350..f2826abd 100644 --- a/tests/common/spider.py +++ b/tests/common/spider.py @@ -1,4 +1,3 @@ - from tarski.fstrips import fstrips from tarski.theories import Theory diff --git a/tests/fol/test_interpretations.py b/tests/fol/test_interpretations.py index 96c781d1..3594ffe7 100644 --- a/tests/fol/test_interpretations.py +++ b/tests/fol/test_interpretations.py @@ -1,4 +1,3 @@ - import tarski import tarski.benchmarks.blocksworld import tarski.model diff --git a/tests/fol/test_syntactic_analysis.py b/tests/fol/test_syntactic_analysis.py index 2fa54813..419576a2 100644 --- a/tests/fol/test_syntactic_analysis.py +++ b/tests/fol/test_syntactic_analysis.py @@ -1,4 +1,3 @@ - from tarski.syntax import neg, land, lor, exists, symref, forall, Variable, Constant, Atom from tarski.syntax.ops import 
free_variables, flatten, collect_unique_nodes, all_variables from tests.common import tarskiworld diff --git a/tests/fol/test_syntax.py b/tests/fol/test_syntax.py index 0601ae0d..b3237362 100755 --- a/tests/fol/test_syntax.py +++ b/tests/fol/test_syntax.py @@ -1,4 +1,3 @@ - import copy from collections import defaultdict diff --git a/tests/fol/test_variable_bindings.py b/tests/fol/test_variable_bindings.py index 0ead741d..f96140e0 100644 --- a/tests/fol/test_variable_bindings.py +++ b/tests/fol/test_variable_bindings.py @@ -1,5 +1,3 @@ - - def test_object_type(): pass # TODO diff --git a/tests/fstrips/hybrid/tasks.py b/tests/fstrips/hybrid/tasks.py index c897d159..5222b6af 100644 --- a/tests/fstrips/hybrid/tasks.py +++ b/tests/fstrips/hybrid/tasks.py @@ -11,7 +11,7 @@ def create_particles_world(): particles = generate_numeric_instance() task.language = generate_numeric_instance() - x, y, f = [particles.get_function(name) for name in ['x', 'y', 'f']] + x, y, f = (particles.get_function(name) for name in ['x', 'y', 'f']) _ = [particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']] p_var = Variable('p', task.language.get_sort('particle')) @@ -26,7 +26,7 @@ def create_billiards_world(): task = hybrid.Problem() lang = generate_billiards_instance() task.language = lang - m, F, a, v, p = [lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']] + m, F, a, v, p = (lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']) b = Variable('b', lang.get_sort('ball')) d = Variable('d', lang.get_sort('dimension')) diff --git a/tests/fstrips/hybrid/test_differential.py b/tests/fstrips/hybrid/test_differential.py index c89356b1..aa86d27e 100644 --- a/tests/fstrips/hybrid/test_differential.py +++ b/tests/fstrips/hybrid/test_differential.py @@ -7,8 +7,8 @@ def test_diff_constraint_creation(): particles = generate_numeric_instance() - x, y, f = [particles.get_function(name) for name in ['x', 'y', 'f']] - p1, p2, p3, p4 = [particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']] + x, y, f = (particles.get_function(name) for name in ['x', 'y', 'f']) + p1, p2, p3, p4 = (particles.get_constant(name) for name in ['p1', 'p2', 'p3', 'p4']) constraint = hybrid.DifferentialConstraint(particles, 'test', [], top, x(p1), f(p1) * 2.0) assert isinstance(constraint, hybrid.DifferentialConstraint) diff --git a/tests/fstrips/hybrid/test_reactions.py b/tests/fstrips/hybrid/test_reactions.py index 9b036d22..fe846d8f 100644 --- a/tests/fstrips/hybrid/test_reactions.py +++ b/tests/fstrips/hybrid/test_reactions.py @@ -8,7 +8,7 @@ def test_reaction_creation(): from tarski.syntax.arithmetic import summation lang = numeric.generate_billiards_instance() - m, F, a, v, p = [lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']] + m, F, a, v, p = (lang.get_function(n) for n in ['m', 'F', 'a', 'v', 'p']) b = Variable('b', lang.get_sort('ball')) d = Variable('d', lang.get_sort('dimension')) diff --git a/tests/fstrips/test_problem_grounding.py b/tests/fstrips/test_problem_grounding.py index 4f432d86..667f6ea8 100644 --- a/tests/fstrips/test_problem_grounding.py +++ b/tests/fstrips/test_problem_grounding.py @@ -27,7 +27,7 @@ def test_action_grounding_bw(): b1, b2, b3, clear, on, ontable, handempty, holding = \ problem.language.get('b1', 'b2', 'b3', 'clear', 'on', 'ontable', 'handempty', 'holding') unstack = problem.get_action("unstack") - x1, x2 = [symref(x) for x in unstack.parameters] # Unstack has two parameters + x1, x2 = (symref(x) for x in unstack.parameters) # Unstack has two parameters ground = 
ground_schema_into_plain_operator(unstack, {x1: b1, x2: b2}) # i.e. the operator unstack(b1, b2) assert isinstance(ground, PlainOperator) and \ str(ground.precondition) == '(on(b1,b2) and clear(b1) and handempty())' diff --git a/tests/fstrips/test_representation.py b/tests/fstrips/test_representation.py index bbfeaabd..19efa55d 100644 --- a/tests/fstrips/test_representation.py +++ b/tests/fstrips/test_representation.py @@ -284,7 +284,7 @@ def test_compute_complementary_atoms(): def test_simple_expression_substitutions(): lang = tarski.benchmarks.blocksworld.generate_strips_bw_language(nblocks=2) - clear, b1, b2 = [lang.get(name) for name in ('clear', 'b1', 'b2')] + clear, b1, b2 = (lang.get(name) for name in ('clear', 'b1', 'b2')) x, y = lang.variable('x', 'object'), lang.variable('y', 'object') formula = clear(x) diff --git a/tests/fstrips/test_simplify.py b/tests/fstrips/test_simplify.py index cb428ebc..bd5e63f4 100644 --- a/tests/fstrips/test_simplify.py +++ b/tests/fstrips/test_simplify.py @@ -11,7 +11,7 @@ def test_simplifier(): lang = problem.language value, max_int, counter, val_t, c1 = lang.get('value', 'max_int', 'counter', 'val', 'c1') x = lang.variable('x', counter) - two, three, six = [lang.constant(c, val_t) for c in (2, 3, 6)] + two, three, six = (lang.constant(c, val_t) for c in (2, 3, 6)) s = Simplify(problem, problem.init) assert symref(s.simplify_expression(x)) == symref(x) @@ -76,7 +76,7 @@ def test_simplification_pruning(): problem = generate_fstrips_counters_problem(ncounters=3) lang = problem.language value, max_int, counter, val_t, c1 = lang.get('value', 'max_int', 'counter', 'val', 'c1') - three, six = [lang.constant(c, val_t) for c in (3, 6)] + three, six = (lang.constant(c, val_t) for c in (3, 6)) s = Simplify(problem, problem.init) @@ -98,7 +98,7 @@ def test_simplification_of_ex_quantification(): value, max_int, counter, val_t, c1 = lang.get('value', 'max_int', 'counter', 'val', 'c1') x = lang.variable('x', counter) z = lang.variable('z', counter) - two, three, six = [lang.constant(c, val_t) for c in (2, 3, 6)] + two, three, six = (lang.constant(c, val_t) for c in (2, 3, 6)) phi = exists(z, land(x == z, top, value(z) < six)) assert simplify_existential_quantification(phi, inplace=False) == land(top, value(x) < six), \ diff --git a/tests/io/test_fstrips_parsing.py b/tests/io/test_fstrips_parsing.py index 5484b346..246238e2 100644 --- a/tests/io/test_fstrips_parsing.py +++ b/tests/io/test_fstrips_parsing.py @@ -1,4 +1,3 @@ - import pytest from tarski.errors import UndefinedSort, UndefinedPredicate from tarski.fstrips import AddEffect, FunctionalEffect @@ -107,12 +106,12 @@ def test_domain_name_parsing(): # Test a few names expected to be valid: for domain_name in ["BLOCKS", "blocS-woRlD", "blocks_world"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" _ = r.parse_string(tag, get_rule("domain")) # And a few ones expected to be invalid for domain_name in ["BL#OCKS", "@mydomain", "2ndblocksworld", "blocks2.0"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" with pytest.raises(ParsingError): _ = r.parse_string(tag, get_rule("domain")) @@ -123,12 +122,12 @@ def test_formulas(): # Test a few names expected to be valid: for domain_name in ["BLOCKS", "blocS-woRlD", "blocks_world"]: - tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" _ = r.parse_string(tag, get_rule("domain")) # And a few ones expected to be invalid for domain_name in ["BL#OCKS", "@mydomain", "2ndblocksworld", "blocks2.0"]: - 
tag = "(domain {})".format(domain_name) + tag = f"(domain {domain_name})" with pytest.raises(ParsingError): _ = r.parse_string(tag, get_rule("domain")) @@ -246,7 +245,7 @@ def test_symbol_casing(): # PDDL predicate current-deal remains unaffected _ = problem.language.get_predicate("current-deal") - assert "to-deal" in set(x.symbol for x in get_symbols(problem.language, type_="predicate", include_builtin=False)) + assert "to-deal" in {x.symbol for x in get_symbols(problem.language, type_="predicate", include_builtin=False)} SPIDER_DEAL_CARD_ACTION = """ diff --git a/tests/io/test_fstrips_writer.py b/tests/io/test_fstrips_writer.py index 24287a50..b7c73cb9 100644 --- a/tests/io/test_fstrips_writer.py +++ b/tests/io/test_fstrips_writer.py @@ -65,11 +65,11 @@ def test_effect_writing(): e2 = AddEffect(clear(b1)) e3 = DelEffect(clear(b1)) - s1, s2, s3 = [print_effect(e) for e in [e1, e2, e3]] + s1, s2, s3 = (print_effect(e) for e in [e1, e2, e3]) assert s1 == "(assign (loc b1) table)" assert s2 == "(clear b1)" assert s3 == "(not (clear b1))" - assert print_effects([e1, e2, e3]) == "(and\n {}\n {}\n {})".format(s1, s2, s3) + assert print_effects([e1, e2, e3]) == f"(and\n {s1}\n {s2}\n {s3})" e4 = UniversalEffect([block_var], [AddEffect(clear(block_var))]) s4 = print_effect(e4) diff --git a/tests/io/test_pddl_parsing.py b/tests/io/test_pddl_parsing.py index 11994f2d..8fd17ed6 100644 --- a/tests/io/test_pddl_parsing.py +++ b/tests/io/test_pddl_parsing.py @@ -150,7 +150,7 @@ def test_basic_constructs(): print("Predicates", len(instance.predicates)) print("Types", len(instance.types)) print("Constants", len(instance.constants)) - print("Actions: instantaneous: {} durative: {}".format(len(instance.actions), len(instance.durative))) + print(f"Actions: instantaneous: {len(instance.actions)} durative: {len(instance.durative)}") print("Derived predicates:", len(instance.derived)) print("Initial State literals", len(instance.init)) diff --git a/tests/ndl/test_temporal.py b/tests/ndl/test_temporal.py index c081fce5..a5dc57c7 100644 --- a/tests/ndl/test_temporal.py +++ b/tests/ndl/test_temporal.py @@ -21,7 +21,7 @@ def test_resource_lock_creation(): L = tsk.language("mylang", theories=[Theory.EQUALITY, Theory.ARITHMETIC]) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')) int_t = L.Integer engaged = L.function('engaged', sensor_sort, int_t) @@ -63,7 +63,7 @@ def test_action_creation(): direction = L.function('direction', platform_t, int_t) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')) engaged = L.function('engaged', sensor_sort, int_t) region_t = L.sort('region') @@ -105,7 +105,7 @@ def test_instance_creation(): direction = L.function('direction', platform_t, int_t) sensor_sort = L.sort('sensor') - camera, range, bearing = [L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')] + camera, range, bearing = (L.constant(name, sensor_sort) for name in ('camera', 'range', 'bearing')) engaged = L.function('engaged', sensor_sort, int_t) region_t = L.sort('region') diff --git a/tests/reachability/test_reachability_lp.py b/tests/reachability/test_reachability_lp.py index 5addd2a2..eb0dcde8 100644 --- 
a/tests/reachability/test_reachability_lp.py +++ b/tests/reachability/test_reachability_lp.py @@ -1,4 +1,3 @@ - from tarski.reachability.asp import create_reachability_lp, LogicProgram, ReachabilityLPCompiler, LPAtom from tarski.syntax import exists from tarski import fstrips as fs @@ -16,7 +15,7 @@ def test_lp_compilation(): problem = create_sample_problem() lang = problem.language - x, y, z = [lang.variable(x, lang.Object) for x in ["x", "y", "z"]] + x, y, z = (lang.variable(x, lang.Object) for x in ["x", "y", "z"]) room, ball, at_robby, free, at, gripper, carry = lang.get( "room", "ball", "at-robby", "free", "at", "gripper", "carry") diff --git a/tests/sas/test_action.py b/tests/sas/test_action.py index 8670e00c..47003746 100644 --- a/tests/sas/test_action.py +++ b/tests/sas/test_action.py @@ -22,9 +22,9 @@ def test_variable_interface(): # Constant objects none = L.constant("none", L.Object) - the_balls = [L.constant('ball{}'.format(i), ball_t) for i in range(5)] + the_balls = [L.constant(f'ball{i}', ball_t) for i in range(5)] grippers = [L.constant(gripper_name, gripper_t) for gripper_name in ('left', 'right')] - the_rooms = [L.constant('room{}'.format(i), room_t) for i in ('a', 'b')] + the_rooms = [L.constant(f'room{i}', room_t) for i in ('a', 'b')] # predicates carry = L.function('carry', ball_t, gripper_t, L.Integer) @@ -67,9 +67,9 @@ def test_gripper(): # Constant objects none = L.constant("none", L.Object) - the_balls = [L.constant('ball{}'.format(i), ball_t) for i in range(5)] + the_balls = [L.constant(f'ball{i}', ball_t) for i in range(5)] grippers = [L.constant(gripper_name, gripper_t) for gripper_name in ('left', 'right')] - the_rooms = [L.constant('room{}'.format(i), room_t) for i in ('a', 'b')] + the_rooms = [L.constant(f'room{i}', room_t) for i in ('a', 'b')] # predicates carry = L.function('carry', ball_t, gripper_t, L.Integer) @@ -145,8 +145,8 @@ def test_temporal_action(): qstate_t = L.sort('qstate', L.Object) # constant objects - qbits = [L.constant('n{}'.format(i), qbit_t) for i in range(8)] - qstates = [L.constant('q{}'.format(i), qstate_t) for i in range(8)] + qbits = [L.constant(f'n{i}', qbit_t) for i in range(8)] + qstates = [L.constant(f'q{i}', qstate_t) for i in range(8)] # predicates at = L.function('at', qstate_t, qbit_t, L.Integer) diff --git a/tests/search/test_search_models.py b/tests/search/test_search_models.py index 975cb1d0..a055fb1f 100644 --- a/tests/search/test_search_models.py +++ b/tests/search/test_search_models.py @@ -31,7 +31,7 @@ def test_forward_search_model(): moveright_op = ground_schema_into_plain_operator_from_grounding(move, ('rooma', 'roomb')) assert s1 == progress(s0, moveright_op) - successors = set(succ for op, succ in model.successors(s0)) + successors = {succ for op, succ in model.successors(s0)} assert s1 in successors # Let's test add-after-delete semantics are correctly enforced. 
The move(x, y) action in Gripper doesn't diff --git a/tests/test_utils.py b/tests/test_utils.py index 18659edd..08fab260 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,3 @@ - from tarski.utils import resources diff --git a/tests/transforms/test_syntax_transformations.py b/tests/transforms/test_syntax_transformations.py index 0f835ce8..a4363753 100644 --- a/tests/transforms/test_syntax_transformations.py +++ b/tests/transforms/test_syntax_transformations.py @@ -20,7 +20,7 @@ def test_nnf_conjunction(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') phi = neg(land(loc(b1) != loc(b2), loc(b3) != loc(b4))) @@ -36,7 +36,7 @@ def test_nnf_double_negation(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') phi = neg(neg(loc(b1) == loc(b2))) @@ -50,7 +50,7 @@ def test_nnf_quantifier_flips(): bw = tarski.benchmarks.blocksworld.generate_fstrips_bw_language() block = bw.get_sort('block') loc = bw.get_function('loc') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) x = bw.variable('x', block) @@ -74,7 +74,7 @@ def test_nnf_lpl_page_321_antecedent(): def test_prenex_idempotency(): bw = tarski.benchmarks.blocksworld.generate_fstrips_bw_language() loc = bw.get_function('loc') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) phi = loc(b1) == b2 assert str(to_prenex_negation_normal_form(bw, phi, do_copy=True)) == str(phi) @@ -161,7 +161,7 @@ def test_builtin_negation_absorption(): _ = bw.get_sort('place') loc = bw.get_function('loc') _ = bw.get_predicate('clear') - b1, b2, b3, b4 = [bw.get_constant('b{}'.format(k)) for k in range(1, 5)] + b1, b2, b3, b4 = (bw.get_constant(f'b{k}') for k in range(1, 5)) _ = bw.get_constant('table') _ = bw.variable('x', block) From 14238a6ade68a8c2d19395b43be6b0a2941a01a1 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 11:34:23 +0100 Subject: [PATCH 05/14] Fix some more style issues --- src/tarski/__init__.py | 2 +- src/tarski/errors.py | 16 +++++------ src/tarski/fol.py | 2 +- src/tarski/io/pddl/__init__.py | 34 ++++++++++++------------ src/tarski/io/pddl/instance.py | 3 ++- src/tarski/reachability/gringo.py | 13 +++++---- src/tarski/syntax/arithmetic/__init__.py | 9 +++---- src/tarski/syntax/terms.py | 24 +++++++++-------- src/tarski/syntax/visitors.py | 1 - 9 files changed, 51 insertions(+), 53 deletions(-) diff --git a/src/tarski/__init__.py b/src/tarski/__init__.py index 3cb04989..ed8aa708 100644 --- a/src/tarski/__init__.py +++ b/src/tarski/__init__.py @@ -1,5 +1,5 @@ import logging -import sys as sys +import sys from .version import __version__, __version_info__ from .fol import FirstOrderLanguage diff --git a/src/tarski/errors.py b/src/tarski/errors.py index 703a4d2b..715da88e 100644 --- a/src/tarski/errors.py +++ b/src/tarski/errors.py @@ -22,24 +22,21 @@ def __init__(self, msg=None): class LanguageMismatch(SyntacticError): def __init__(self, obj, l1, l2, msg=None): - msg = msg or ('Language mismatch when operating on object {obj} of 
type {classname}.\n' - 'Expected language: {l2}\n' - 'Actual language: : {l1}\n') \ - .format(obj=obj, classname=type(obj).__name__, l1=l1, l2=l2) + msg = msg or (f'Language mismatch when operating on object {obj} of type {type(obj).__name__}.\n' + f'Expected language: {l2}\n' + f'Actual language: : {l1}\n') super().__init__(msg) class ArityMismatch(SyntacticError): def __init__(self, head, arguments, msg=None): - msg = msg or 'Arity mismatch applying element {} with arity {} to arguments {}'. \ - format(head, head.arity, arguments) + msg = msg or f'Arity mismatch applying element {head} with arity {head.arity} to arguments {arguments}' super().__init__(msg) class SortMismatch(SyntacticError): def __init__(self, element, type_, expected_type, msg=None): - msg = msg or 'Sort mismatch on element {}. Expected sort was "{}", element has sort "{}"'.format( - element, expected_type, type_) + msg = msg or f'Sort mismatch on element {element}. Expected sort: "{expected_type}". Actual sort: "{type_}"' super().__init__(msg) @@ -128,8 +125,7 @@ def __init__(self, var, msg=None): class IncorrectExtensionDefinition(SemanticError): def __init__(self, element, point, value, msg=None): - msg = msg or 'Incorrect definition of extension of symbol "{}". Cannot assign value "{}" to point "{}"'.format( - element, value, point) + msg = msg or f'Incorrect extension of symbol "{element}". Cannot assign value "{value}" to point "{point}"' super().__init__(msg) diff --git a/src/tarski/fol.py b/src/tarski/fol.py index a1571508..74838b4b 100644 --- a/src/tarski/fol.py +++ b/src/tarski/fol.py @@ -361,7 +361,7 @@ def connected_in_type_hierarchy(self, t_0, t_goal): """ if t_goal in self.indirect_ancestor_sorts[t_0]: return True - OPEN = [t for t in self.ancestor_sorts[t_0]] + OPEN = list(self.ancestor_sorts[t_0]) while len(OPEN) != 0: t = OPEN.pop() if t == t_goal: diff --git a/src/tarski/io/pddl/__init__.py b/src/tarski/io/pddl/__init__.py index fa9e6e9e..e857df86 100644 --- a/src/tarski/io/pddl/__init__.py +++ b/src/tarski/io/pddl/__init__.py @@ -13,23 +13,23 @@ class Features(Enum): """PDDL 3.1 features""" - STRIPS = 0, - TYPING = 1, - NEGATIVE_PRECONDITIONS = 2, - DISJUNCTIVE_PRECONDITIONS = 3, - EQUALITY = 4, - EXISTENTIAL_PRECONDITIONS = 5, - UNIVERSAL_PRECONDITIONS = 6, - CONDITIONAL_EFFECTS = 7, - NUMERIC_FLUENTS = 8, - DURATIVE_ACTIONS = 9, - DURATION_INEQUALITIES = 10, - CONTINUOUS_EFFECTS = 11, - DERIVED_PREDICATES = 12, - TIMED_INITIAL_LITERALS = 13, - PREFERENCES = 14, - CONSTRAINTS = 15, - ACTION_COSTS = 16, + STRIPS = 0 + TYPING = 1 + NEGATIVE_PRECONDITIONS = 2 + DISJUNCTIVE_PRECONDITIONS = 3 + EQUALITY = 4 + EXISTENTIAL_PRECONDITIONS = 5 + UNIVERSAL_PRECONDITIONS = 6 + CONDITIONAL_EFFECTS = 7 + NUMERIC_FLUENTS = 8 + DURATIVE_ACTIONS = 9 + DURATION_INEQUALITIES = 10 + CONTINUOUS_EFFECTS = 11 + DERIVED_PREDICATES = 12 + TIMED_INITIAL_LITERALS = 13 + PREFERENCES = 14 + CONSTRAINTS = 15 + ACTION_COSTS = 16 OBJECT_FLUENTS = 17 diff --git a/src/tarski/io/pddl/instance.py b/src/tarski/io/pddl/instance.py index 1dafe30f..4d343fea 100644 --- a/src/tarski/io/pddl/instance.py +++ b/src/tarski/io/pddl/instance.py @@ -22,7 +22,8 @@ AssignmentEffectData = namedtuple('AssignmentEffectData', ['lhs', 'rhs']) EventData = namedtuple('EventData', ['pre', 'post']) ActionData = namedtuple('ActionData', ['name', 'parameters', 'pre', 'post']) -DurativeActionData = namedtuple('DurativeActionData', ['name', 'parameters', 'at_start', 'at_end', 'overall', 'duration']) +DurativeActionData = namedtuple('DurativeActionData', + 
['name', 'parameters', 'at_start', 'at_end', 'overall', 'duration']) DerivedPredicateData = namedtuple('DerivedPredicateData', ['head', 'parameters', 'body']) ObjectiveData = namedtuple('ObjectiveData', ['mode', 'type', 'expr']) diff --git a/src/tarski/reachability/gringo.py b/src/tarski/reachability/gringo.py index 9a7d1475..392c45c0 100644 --- a/src/tarski/reachability/gringo.py +++ b/src/tarski/reachability/gringo.py @@ -1,5 +1,8 @@ #!/usr/bin/env python3 import sys +from typing import Sequence + +from clingo import Control from clingo.application import Application, clingo_main # type: ignore @@ -10,17 +13,17 @@ class WrapperClingo(Application): def __init__(self, name): self.program_name = name - def main(self, ctl, files): + def main(self, control: Control, files: Sequence[str]) -> None: """ The default implementation from clingo documentation Note- main(...) must be implemented """ for f in files: - ctl.load(f) + control.load(f) if not files: - ctl.load("-") - ctl.ground([("base", [])]) - ctl.solve() + control.load("-") + control.ground([("base", [])]) + control.solve() # run the clingo application in the default gringo mode diff --git a/src/tarski/syntax/arithmetic/__init__.py b/src/tarski/syntax/arithmetic/__init__.py index d7fcf967..625108eb 100644 --- a/src/tarski/syntax/arithmetic/__init__.py +++ b/src/tarski/syntax/arithmetic/__init__.py @@ -32,15 +32,12 @@ def prodterm(*args): variables = args[:-1] expr = args[-1] if len(variables) < 1: - raise err.SyntacticError(msg='sumterm(x0,x1,...,xn,expr) requires at least one\ - bound variable, arguments: {}'.format(args)) + raise err.SyntacticError(f'prod(x0,x1,...,xn,expr) requires at least one bound variable, arguments: {args}') for x in variables: if not isinstance(x, Variable): - raise err.SyntacticError(msg='sum(x0,...,xn,expr) require each\ - argument xi to be an instance of Variable') + raise err.SyntacticError('prod(x0,...,xn,expr) requires each argument xi to be an instance of Variable') if not isinstance(expr, Term): - raise err.SyntacticError(msg='sum(x0,x1,...,xn,expr) requires last \ - argument "expr" to be an instance of Term, got "{}"'.format(expr)) + raise err.SyntacticError(f'prod(x0,x1,...,xn,expr) requires "expr" to be a Term, got "{expr}"') return AggregateCompoundTerm(BuiltinFunctionSymbol.MUL, variables, expr) diff --git a/src/tarski/syntax/terms.py b/src/tarski/syntax/terms.py index ce435a54..0ce44c5f 100644 --- a/src/tarski/syntax/terms.py +++ b/src/tarski/syntax/terms.py @@ -197,7 +197,8 @@ def sort(self): return self.symbol.codomain def __str__(self): - return '{}({})'.format(self.symbol.symbol, ', '.join(str(t) for t in self.subterms)) + args = ', '.join(str(t) for t in self.subterms) + return f'{self.symbol.symbol}({args})' __repr__ = __str__ @@ -258,20 +259,21 @@ def __init__(self, condition, subterms: Tuple[Term, Term]): if len(subterms) != 2: raise err.ArityMismatch('IfThenElse', subterms, msg='IfThenElse term needs exactly two sub terms') - self.symbol = subterms[0].language.get('ite') + left, right = subterms + + self.symbol = left.language.get('ite') self.condition = condition # Our implementation of ite requires both branches to have equal sort - if subterms[0].sort != subterms[1].sort: - if parent(subterms[0].sort) == subterms[1].sort: - self._sort = subterms[1].sort - elif parent(subterms[1].sort) == subterms[0].sort: - self._sort = subterms[0].sort + if left.sort != right.sort: + if parent(left.sort) == right.sort: + self._sort = right.sort + elif parent(right.sort) == left.sort: + self._sort = 
left.sort else: - raise err.SyntacticError( - msg='IfThenElse: both subterms need to be of the same sort! lhs: "{}"({}), rhs: "{}"({})'.format( - subterms[0], subterms[0].sort, subterms[1], subterms[1].sort)) + raise err.SyntacticError('IfThenElse: Mismatching subterm sort. ' + f'lhs: "{left}"({left.sort}), rhs: "{right}"({right.sort})') else: - self._sort = subterms[0].sort + self._sort = left.sort self.subterms = tuple(subterms) diff --git a/src/tarski/syntax/visitors.py b/src/tarski/syntax/visitors.py index da965b92..d321a2e6 100644 --- a/src/tarski/syntax/visitors.py +++ b/src/tarski/syntax/visitors.py @@ -91,4 +91,3 @@ def visit(self, phi): elif isinstance(phi, Atom): if is_eq_atom(phi): self.atoms.add(symref(phi)) - return From 9be1ae7d37055e4bbfcd55da80507e2759d00714 Mon Sep 17 00:00:00 2001 From: Miquel Ramirez Date: Wed, 9 Mar 2022 23:09:07 +1100 Subject: [PATCH 06/14] Tarski examples - "SAS without PDDL" work in progress --- examples/sas_without_pddl/__init__.py | 0 examples/sas_without_pddl/__main__.py | 180 ++++++++++++++++++++++++++ src/tarski/model.py | 2 +- 3 files changed, 181 insertions(+), 1 deletion(-) create mode 100644 examples/sas_without_pddl/__init__.py create mode 100644 examples/sas_without_pddl/__main__.py diff --git a/examples/sas_without_pddl/__init__.py b/examples/sas_without_pddl/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/sas_without_pddl/__main__.py b/examples/sas_without_pddl/__main__.py new file mode 100644 index 00000000..fd5afadf --- /dev/null +++ b/examples/sas_without_pddl/__main__.py @@ -0,0 +1,180 @@ +# ---------------------------------------------------------------------------------------------------------------------- +# examples/sas_without_pddl/__main__.py +# +# In this example we construct a sas instance programmatically and write it in a JSON document which can then be +# transformed or loaded by a backend planner. +# +# Extensive inline comments explain the code as we go. 
+# ---------------------------------------------------------------------------------------------------------------------- + +import os +from argparse import ArgumentParser, Namespace +from collections import namedtuple +from itertools import product +from tarski.evaluators.simple import evaluate + +import tarski +import tarski.model + +from tarski.syntax import symref +from tarski.theories import Theory +from tarski.syntax.transform.substitutions import substitute_expression, create_substitution +from tarski.util import SymbolIndex + +Schema = namedtuple('Schema', ['name', 'variables', 'constraints', 'transitions']) +Action = namedtuple('Action', ['name', 'arguments', 'transitions']) + + +def process_command_line(): + parser = ArgumentParser(description="Example illustrating acquisition of instance data from PDDL") + parser.add_argument("--instance", dest='instance', default='4-0') + parser.add_argument("--verbose", dest='verbose', action='store_true') + opt = parser.parse_args() + return opt + + +def check_constraints(C, s, subst): + return all([s[substitute_expression(c, subst)] for c in C]) + + +def ground_action_schemas(lang, schemas): + + actions = [] + s = tarski.model.create(lang) + s.evaluator = evaluate + + for sch in schemas: + sch_x = [entry[0] for entry in sch.variables] + sch_D = [entry[1] for entry in sch.variables] + + for a in product(*sch_D): + subst = create_substitution(sch_x, a) + if not check_constraints(sch.constraints, s, subst): + continue + + action_a = Action(name=sch.name, + arguments=a, + transitions=[(substitute_expression(x, subst), + substitute_expression(pre, subst), + substitute_expression(post, subst)) for x, pre, post in sch.transitions]) + actions += [action_a] + + return actions + + +def main(opt: Namespace): + + lang = tarski.language("blocksworld", theories=[Theory.EQUALITY, Theory.ARITHMETIC]) + + object_type = lang.get('object') + + blocks = [lang.constant(obj, object_type) for obj in ['A', 'B', 'C', 'D']] + table = lang.constant('table', object_type) + nothing = lang.constant('nothing', object_type) + + on = lang.function('on', object_type, object_type) + below = lang.function('below', object_type, object_type) + holding = lang.function('holding', object_type) + + # We define SAS action schemas in a very straightforward way + target = lang.variable('x', object_type) + pickup = Schema(name='pickup', + variables=[(target, blocks)], + constraints=[], + transitions=[ + (holding(), nothing, target), + (on(target), nothing, nothing), + (below(target), table, nothing) + ]) + + putdown = Schema(name='putdown', + variables=[(target, blocks)], + constraints=[], + transitions=[ + (holding(), target, nothing), + (below(target), nothing, table) + ]) + + target2 = lang.variable('y', object_type) + stack = Schema(name='stack', + variables=[(target, blocks), (target2, blocks)], + constraints=[target != target2], + transitions=[ + (holding(), target, nothing), + (on(target2), nothing, target), + (below(target), nothing, target2) + ]) + + unstack = Schema(name='unstack', + variables=[(target, blocks), (target2, blocks)], + constraints=[target != target2], + transitions=[ + (holding(), nothing, target), + (on(target2), target, nothing), + (below(target), target2, nothing) + ]) + + actions = ground_action_schemas(lang, [pickup, putdown, stack, unstack]) + + A, B, C, D = blocks + + initial = [ + (on(C), nothing), + (on(A), nothing), + (on(B), nothing), + (on(D), nothing), + (below(C), table), + (below(A), table), + (below(B), table), + (below(D), table), + (holding(), 
nothing) + ] + + # we could also have the goal + goal = [ + (on(D), C), + (on(C), B), + (on(B), A) + ] + + print("Ground actions generated:", len(actions)) + + X = SymbolIndex() + for act in actions: + for x, _, _ in act.transitions: + if symref(x) in X: + continue + X.add(symref(x)) + for x, _ in initial: + if symref(x) in X: + continue + X.add(symref(x)) + for x, _ in goal: + if symref(x) in X: + continue + X.add(symref(x)) + + print("State variables:", len(X)) + + D = SymbolIndex() + D.add(symref(nothing)) + D.add(symref(table)) + for b in blocks: + D.add(symref(b)) + + for k, a in enumerate(actions): + for x, v, w in a.transitions: + #print(k, x, v, w) + #print(k, type(x), type(v), type(w)) + print(k, X.get_index(symref(x)), D.get_index(symref(v)), D.get_index(symref(w))) + + for x, v in initial: + print(X.get_index(symref(x)), D.get_index(symref(v))) + + for x, v in goal: + print(X.get_index(symref(x)), D.get_index(symref(v))) + + +if __name__ == '__main__': + opt = process_command_line() + main(opt) \ No newline at end of file diff --git a/src/tarski/model.py b/src/tarski/model.py index 9c88c693..f7c2b1df 100644 --- a/src/tarski/model.py +++ b/src/tarski/model.py @@ -2,7 +2,7 @@ from typing import Union from . import errors as err -from .syntax import Function, Constant, CompoundTerm, symref +from .syntax import Function, Constant, CompoundTerm, symref, Variable from .syntax.predicate import Predicate From 70185764ab4927d7e4b11fbe0e620d92b912c7d5 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 11:47:11 +0100 Subject: [PATCH 07/14] Temporarily disable 3.11 alpha build for unit tests --- .github/workflows/unit-tests.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index f61fe4cc..f60adbce 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -30,10 +30,14 @@ jobs: # python-version: ['3.8', '3.9', '3.10', 'pypy-3.9'] python-version: ['3.8', '3.9', '3.10'] experimental: [false] - include: - - os: ubuntu-latest - python-version: '3.11.0-alpha.5' - experimental: true + + # I'm temporarily disabling the build on the 3.11 alpha, as the installation aims at buiilding the scipy wheel, + # which fails spectacularly with a 10K lines log of Fortran compilation errors. + # Let's wait a bit until a binary wheel is available +# include: +# - os: ubuntu-latest +# python-version: '3.11.0-alpha.5' +# experimental: true runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} From 90b718eb86eafdc78f9cbc7ca3ecc28406477d95 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 14:45:28 +0100 Subject: [PATCH 08/14] Set up workflow for automatically publishing to pypi. 
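
Before this patch's diff, a brief aside on the example added in PATCH 06 above: the grounding loop in `examples/sas_without_pddl/__main__.py` boils down to a single idiom, namely building a substitution from schema variables to constants with `create_substitution` and applying it to every schema expression with `substitute_expression`. The snippet below is a minimal, self-contained sketch of that idiom only; the toy language and symbols are invented for illustration and are not taken from the example itself.

```python
import tarski
from tarski.theories import Theory
from tarski.syntax.transform.substitutions import create_substitution, substitute_expression

# A toy language with a single unary function, just to exercise the idiom
lang = tarski.language("sketch", theories=[Theory.EQUALITY])
obj = lang.get('object')
a = lang.constant('a', obj)
x = lang.variable('x', obj)
on = lang.function('on', obj, obj)  # on: object -> object

subst = create_substitution([x], [a])       # binds x -> a
print(substitute_expression(on(x), subst))  # prints on(a)
```

The example's `check_constraints` helper follows the same pattern: it substitutes a candidate binding into each constraint and evaluates the result against a model equipped with the simple evaluator.
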
--- .github/workflows/publish-to-pypi.yml | 46 ++++++++++++++++++++ .github/workflows/style-docs-grammars.yml | 4 ++ .github/workflows/unit-tests.yml | 4 ++ docs/conf.py | 2 +- docs/development/releasing.md | 51 ++++++++--------------- 5 files changed, 72 insertions(+), 35 deletions(-) create mode 100644 .github/workflows/publish-to-pypi.yml diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml new file mode 100644 index 00000000..43e0dd58 --- /dev/null +++ b/.github/workflows/publish-to-pypi.yml @@ -0,0 +1,46 @@ +name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI + +# Relevant pointers: +# - https://github.com/pypa/gh-action-pypi-publish +# - https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries +# - https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ + +on: + # We assume that releases are made from master branch, which is protected against direct pushes, and hence all code + # there will have successfully run through all unit test, etc. in the PR-configured workflows + release: + types: [published] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - name: Clone repo + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.10' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build package + run: python -m build + + # We'll first do a test upload, and only if successful do the real upload + - name: Publish package to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1.5.0 + with: + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@release/v1.5.0 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/style-docs-grammars.yml b/.github/workflows/style-docs-grammars.yml index dade29bb..5ed3bdfb 100644 --- a/.github/workflows/style-docs-grammars.yml +++ b/.github/workflows/style-docs-grammars.yml @@ -1,7 +1,11 @@ name: Style checks, documentation building, grammars on: + # Run workflow on all PRs plus on direct pushes to devel. master is protected against direct pushes. pull_request: + push: + branches: + - devel env: DOWNWARD_BENCHMARKS: "/tmp/downward-benchmarks" diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index f60adbce..335b8dbc 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -1,7 +1,11 @@ name: Unit tests on: + # Run workflow on all PRs plus on direct pushes to devel. master is protected against direct pushes. pull_request: + push: + branches: + - devel env: DOWNWARD_BENCHMARKS: "/tmp/downward-benchmarks" diff --git a/docs/conf.py b/docs/conf.py index 64359a33..ecd5acc2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ # -- Project information ----------------------------------------------------- project = 'Tarski' -copyright = '2019-2020, Miquel Ramírez and Guillem Francès' +copyright = '2019-2022, Miquel Ramírez and Guillem Francès' author = 'Miquel Ramírez and Guillem Francès' # The short X.Y version. 
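
One assumption baked into the publishing workflow above (and into the updated `docs/development/releasing.md` below) is that the version recorded in `src/tarski/version.py` matches the tag of the GitHub release that triggers the upload. Purely as an illustration, not something this patch adds, a guard along the following lines could fail fast on a forgotten version bump; it assumes the `GITHUB_REF_NAME` variable that GitHub Actions exposes for the triggering ref.

```python
# Hypothetical pre-publish check: compare the release tag against the packaged version.
import os
import sys

import tarski

tag = os.environ.get("GITHUB_REF_NAME", "")  # e.g. "v0.8.0" on a release event (assumed)
if tag and tag.lstrip("v") != tarski.__version__:
    sys.exit(f"Release tag '{tag}' does not match tarski.__version__ ('{tarski.__version__}')")
print(f"Publishing tarski {tarski.__version__}")
```

Dropped in as an extra step before `python -m build`, a check like this would stop the job before anything reaches TestPyPI or PyPI.
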
diff --git a/docs/development/releasing.md b/docs/development/releasing.md index 97bc583d..74e8e3cc 100644 --- a/docs/development/releasing.md +++ b/docs/development/releasing.md @@ -1,39 +1,22 @@ -# Release Process +# Creating Tarski releases -## Uploading a Tarski release to the Python Package Index +The release procedure is as follows: -Docs: +We release by creating a PR against the `master` branch, then manually c -1. Update the `CHANGELOG.md` file. -1. Update the version number in `src/tarski/version.py` -1. Commit. Use message like "Preparing for release 0.6.0". -1. Go to the `master` branch and merge the `devel` branch. -1. Tag the release. - ```bash - export TARSKI_RELEASE="v0.2.0" - git tag -a ${TARSKI_RELEASE} -m "Tarski release ${TARSKI_RELEASE}" - ``` - -1. Run the following instructions from the root directory of the project: - ```bash - python3 -m pip install --upgrade twine setuptools wheel # (optional) - - rm -rf dist/ - python3 setup.py sdist bdist_wheel - - # Test first (result will go to https://test.pypi.org/project/tarski/): - python3 -m twine upload --skip-existing --repository-url https://test.pypi.org/legacy/ dist/* - - # Then go! - python3 -m twine upload --skip-existing dist/* - ``` - -1. Push all code changes plus the tag to the repo: - ```bash - git push && git push origin ${TARSKI_RELEASE} - ``` - -1. Check the [Github releases page](https://github.com/aig-upf/tarski/releases) to make sure the new release appears - there and can be downwloaded. +1. Make sure you're in the `devel` branch. +2. Update the version number in `src/tarski/version.py` +3. Update the `CHANGELOG.md` file manually to reflect the changes since the last release. +4. Commit the changes. Use message like "Preparing for release 0.6.0". +5. Create a PR against the `master` branch. +6. Once all workflows have executed successfully, merge the PR. +7. Create a [new release](https://github.com/aig-upf/tarski/releases/new) in the GH repo, + with an appropriate tag such as `v0.2.0`. The new release will trigger a GH workflow that will + automatically upload the package to `pypi`. +## Relevant Pointers +* +* +* +* From 1088b35423ef544767307f21cee21dd3db562b37 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 16:14:01 +0100 Subject: [PATCH 09/14] Fix style --- src/tarski/model.py | 2 +- src/tarski/reachability/gringo.py | 2 +- src/tarski/utils/command.py | 5 ----- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/src/tarski/model.py b/src/tarski/model.py index f7c2b1df..9c88c693 100644 --- a/src/tarski/model.py +++ b/src/tarski/model.py @@ -2,7 +2,7 @@ from typing import Union from . 
import errors as err -from .syntax import Function, Constant, CompoundTerm, symref, Variable +from .syntax import Function, Constant, CompoundTerm, symref from .syntax.predicate import Predicate diff --git a/src/tarski/reachability/gringo.py b/src/tarski/reachability/gringo.py index 392c45c0..c6fe4f0f 100644 --- a/src/tarski/reachability/gringo.py +++ b/src/tarski/reachability/gringo.py @@ -2,7 +2,7 @@ import sys from typing import Sequence -from clingo import Control +from clingo import Control # type: ignore from clingo.application import Application, clingo_main # type: ignore diff --git a/src/tarski/utils/command.py b/src/tarski/utils/command.py index 8a3d5a60..5b555971 100644 --- a/src/tarski/utils/command.py +++ b/src/tarski/utils/command.py @@ -1,12 +1,7 @@ import errno import logging import subprocess -from contextlib import contextmanager -import ctypes -import io import os -import sys -import tempfile def count_file_lines(filename): # Might be a bit faster with a call to "wc -l" From b532a18979085dfb053d2a1ce7bdf3ad7572002e Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Wed, 9 Mar 2022 16:30:38 +0100 Subject: [PATCH 10/14] Get rid of multipledispatch dependency Use the new functools.singledispatchmethod instead --- setup.py | 2 - src/tarski/fstrips/manipulation/simplify.py | 11 ++--- src/tarski/fstrips/ops.py | 48 +++++++++----------- src/tarski/fstrips/walker.py | 14 ++---- src/tarski/syntax/transform/substitutions.py | 5 +- src/tarski/syntax/walker.py | 14 ++---- 6 files changed, 36 insertions(+), 58 deletions(-) diff --git a/setup.py b/setup.py index 7fd6d9e3..c4867e8f 100644 --- a/setup.py +++ b/setup.py @@ -55,8 +55,6 @@ def main(): # to the functioning of Tarski, better be conservative here and install only on Linux. 'psutil; platform_system=="Linux"', - 'multipledispatch', - # Antlr pinned to a specific version to avoid messages "ANTLR runtime and generated code versions disagree" # messages. If we want to bump this up, we'll need to regenerate the grammar files with the new version. 'antlr4-python3-runtime==4.7.2', diff --git a/src/tarski/fstrips/manipulation/simplify.py b/src/tarski/fstrips/manipulation/simplify.py index 64830d9a..89f7b7ba 100644 --- a/src/tarski/fstrips/manipulation/simplify.py +++ b/src/tarski/fstrips/manipulation/simplify.py @@ -1,6 +1,5 @@ import copy - -from multipledispatch import dispatch # type: ignore +from functools import singledispatchmethod from ..fstrips import AddEffect, DelEffect, UniversalEffect, FunctionalEffect from ..ops import collect_all_symbols, compute_number_potential_groundings @@ -197,12 +196,12 @@ def simplify_existential_quantification(node, inplace=True): class ExistentialQuantificationSimplifier(FOLWalker): """ Replaces a formula of the form ∃x.φ[x] ∧ x = t by the formula φ[x/t]. 
""" - @dispatch(object) - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @singledispatchmethod + def visit(self, node): return self.default_handler(node) - @dispatch(QuantifiedFormula) # type: ignore - def visit(self, node: QuantifiedFormula): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: QuantifiedFormula): if node.quantifier == Quantifier.Forall: return node diff --git a/src/tarski/fstrips/ops.py b/src/tarski/fstrips/ops.py index 9967c9b3..8ace57ee 100644 --- a/src/tarski/fstrips/ops.py +++ b/src/tarski/fstrips/ops.py @@ -1,12 +1,7 @@ import operator -from functools import reduce +from functools import singledispatchmethod, reduce from typing import Set, Union -# At the moment we're using the "multipledispatch" package to implement single-argument dispatching. -# Whenever we move to support Python 3.8+, we could directly use -# https://docs.python.org/3/library/functools.html#functools.singledispatchmethod -from multipledispatch import dispatch # type: ignore - from .walker import ProblemWalker from ..syntax import Predicate, Function, CompoundTerm, Atom from .problem import Problem @@ -45,17 +40,17 @@ def __init__(self): super().__init__() self.symbols = set() - @dispatch(object) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 + @singledispatchmethod + def visit(self, node): return self.default_handler(node) - @dispatch(CompoundTerm) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: CompoundTerm): self.symbols.add(node.symbol) return node - @dispatch(Atom) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: Atom): self.symbols.add(node.symbol) return node @@ -66,37 +61,38 @@ def __init__(self): super().__init__() self.symbols = set() - @dispatch(object) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @singledispatchmethod + def visit(self, node): + # raise NotImplementedError(f'Cannot negate node {node} with type "{type(node)}"') return self.default_handler(node) - @dispatch(fs.AddEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.AddEffect): self.symbols.add(node.atom.symbol) return node - @dispatch(fs.DelEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.DelEffect): self.symbols.add(node.atom.symbol) return node - @dispatch(fs.FunctionalEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.FunctionalEffect): self.symbols.add(node.lhs.symbol) return node - @dispatch(fs.ChoiceEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.ChoiceEffect): self.symbols.add(node.obj.symbol) return node - @dispatch(fs.LinearEffect) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: fs.LinearEffect): self.symbols.update(lhs.symbol for lhs in node.y[:, 0]) return node - @dispatch(Derived) # type: ignore - def visit(self, node): # pylint: disable-msg=E0102 # noqa: F811 + @visit.register + def _(self, node: Derived): self.symbols.update(node.predicate) return node diff --git a/src/tarski/fstrips/walker.py b/src/tarski/fstrips/walker.py index 67f86b07..777aca2b 100644 --- 
a/src/tarski/fstrips/walker.py +++ b/src/tarski/fstrips/walker.py @@ -41,16 +41,10 @@ def __str__(self): class ProblemWalker: - """ This is an experimental implementation of a visitor pattern based on single-dispatch. - At the moment we're using the "multipledispatch" package to implement single-argument dispatching. - It's far from perfect; it requires that the subclass declares the following "default" method: - - >>> @dispatch(object) - >>> def visit(self, node): # pylint: disable-msg=E0102 - >>> return self.default_handler(node) - - Whenever we move to support Python 3.8+, we could directly use: - https://docs.python.org/3/library/functools.html#functools.singledispatchmethod + """ + This is an experimental implementation of a visitor pattern based on single-dispatch. + To use it, you need to subclass it and "overload" the `visit` function using the + `functools.singledispatchmethod` decorator, as it is done, for instance, in the class AllSymbolWalker. """ def __init__(self, raise_on_undefined=False): self.default_handler = self._raise if raise_on_undefined else self._donothing diff --git a/src/tarski/syntax/transform/substitutions.py b/src/tarski/syntax/transform/substitutions.py index c39b5264..31b2e5cc 100644 --- a/src/tarski/syntax/transform/substitutions.py +++ b/src/tarski/syntax/transform/substitutions.py @@ -1,8 +1,6 @@ import itertools from typing import List -from multipledispatch import dispatch # type: ignore - from ..symrefs import symref from ..terms import Variable from ..walker import FOLWalker @@ -14,8 +12,7 @@ def __init__(self, substitution): super().__init__(raise_on_undefined=False) self.substitution = substitution - @dispatch(object) - def visit(self, node): # pylint: disable-msg=E0102 + def visit(self, node): x = self.substitution.get(symref(node)) return node if x is None else x diff --git a/src/tarski/syntax/walker.py b/src/tarski/syntax/walker.py index a1fae2ac..d81ced1e 100644 --- a/src/tarski/syntax/walker.py +++ b/src/tarski/syntax/walker.py @@ -27,16 +27,10 @@ def __str__(self): class FOLWalker: - """ This is an experimental implementation of a visitor pattern based on single-dispatch. - At the moment we're using the "multipledispatch" package to implement single-argument dispatching. - It's far from perfect; it requires that the subclass declares the following "default" method: - - >>> @dispatch(object) - >>> def visit(self, node): # pylint: disable-msg=E0102 - >>> return self.default_handler(node) - - Whenever we move to support Python 3.8+, we could directly use: - https://docs.python.org/3/library/functools.html#functools.singledispatchmethod + """ + This is an experimental implementation of a visitor pattern based on single-dispatch. + To use it, you need to subclass it and "overload" the `visit` function using the + `functools.singledispatchmethod` decorator, as it is done, for instance, in the class AllSymbolWalker. 
""" def __init__(self, raise_on_undefined=False): self.default_handler = self._raise if raise_on_undefined else self._donothing From 538c8fb491ef92fd72999e6be6755576df40ee4e Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Thu, 10 Mar 2022 10:13:40 +0100 Subject: [PATCH 11/14] Aesthetic improvements for the import of extra modules --- src/tarski/fol.py | 3 +- src/tarski/funcsym/__init__.py | 7 ++-- src/tarski/io/rddl.py | 3 +- src/tarski/modules.py | 43 ++++++++++++++---------- src/tarski/syntax/algebra/matrix.py | 2 +- src/tarski/syntax/arithmetic/__init__.py | 3 +- src/tarski/syntax/arithmetic/random.py | 6 ++-- src/tarski/syntax/ops.py | 2 +- tests/fol/test_interpretations.py | 8 ++--- 9 files changed, 40 insertions(+), 37 deletions(-) diff --git a/src/tarski/fol.py b/src/tarski/fol.py index 74838b4b..e13390a0 100644 --- a/src/tarski/fol.py +++ b/src/tarski/fol.py @@ -267,8 +267,7 @@ def constants(self): @staticmethod def vector(arraylike, sort: Sort): - np = modules.import_numpy() - return Matrix(np.reshape(arraylike, (len(arraylike), 1)), sort) + return Matrix(modules.numpy.reshape(arraylike, (len(arraylike), 1)), sort) @staticmethod def matrix(arraylike, sort: Sort): diff --git a/src/tarski/funcsym/__init__.py b/src/tarski/funcsym/__init__.py index e5d16b4f..a7956d01 100644 --- a/src/tarski/funcsym/__init__.py +++ b/src/tarski/funcsym/__init__.py @@ -5,13 +5,12 @@ def impl(symbol): """ """ if symbol in {"erf", "erfc"}: - sci = modules.import_scipy_special() return { - "erf": sci.erf, - "erfc": sci.erfc, + "erf": modules.scipy_special.erf, # lazily import scipy.special + "erfc": modules.scipy_special.erfc, }.get(symbol) - np = modules.import_numpy() + np = modules.numpy # lazily import numpy return { "min": lambda x, y: np.min((x, y)), "max": lambda x, y: np.max((x, y)), diff --git a/src/tarski/io/rddl.py b/src/tarski/io/rddl.py index 96ed9359..4440c92b 100644 --- a/src/tarski/io/rddl.py +++ b/src/tarski/io/rddl.py @@ -221,7 +221,8 @@ def __init__(self, filename): def _load_rddl_model(filename): with open(filename, encoding='utf8') as input_file: rddl = input_file.read() - parser = modules.import_pyrddl_parser()() + + parser = modules.RDDLParser() parser.build() # parse RDDL return parser.parse(rddl) diff --git a/src/tarski/modules.py b/src/tarski/modules.py index 251b9dc9..ee6e18a9 100644 --- a/src/tarski/modules.py +++ b/src/tarski/modules.py @@ -1,34 +1,41 @@ -""" A helper module to deal with import of packages that depend on the installation of certain pip extras, -to keep Tarski modular and lightweight for simple uses, but optionally heavyweight for more sophisticated uses. """ +""" +This module helps lazily importing some packages that depend on the installation of certain pip extras, +and that therefore we cannot import greedily. +""" -# TODO: Whenever we raise the Python requirement to Python >= 3.7, we should migrate this to a better -# interface providing direct access to the desired package, e.g. allowing "from tarski.modules import pyrddl", -# which can be easily achieved with the new module-level __getattr__ -# https://docs.python.org/3/reference/datamodel.html#customizing-module-attribute-access - -def import_scipy_special(): +def _import_scipy_special(): try: - import scipy.special as sci # pylint: disable=import-outside-toplevel + import scipy.special # pylint: disable=import-outside-toplevel + return scipy.special except ImportError: raise ImportError('The scipy module does not seem available. 
' - 'Please try installing Tarski with the "arithmetic" extra.') from None - return sci + 'Install Tarski with the "arithmetic" extra: pip install "tarski[arithmetic]"') from None -def import_numpy(): +def _import_numpy(): try: - import numpy as np # pylint: disable=import-outside-toplevel + import numpy # pylint: disable=import-outside-toplevel + return numpy except ImportError: raise ImportError('The numpy module does not seem available. ' - 'Please try installing Tarski with the "arithmetic" extra.') from None - return np + 'Install Tarski with the "arithmetic" extra: pip install "tarski[arithmetic]"') from None -def import_pyrddl_parser(): +def _import_pyrddl_parser(): try: from pyrddl.parser import RDDLParser # pylint: disable=import-outside-toplevel + return RDDLParser except ImportError: raise ImportError('The pyrddl module does not seem available. ' - 'Please try installing Tarski with the "rddl" extra.') from None - return RDDLParser + 'Install Tarski with the "rddl" extra: pip install "tarski[rddl]"') from None + + +def __getattr__(name, *args, **kwargs): + if name == 'RDDLParser': + return _import_pyrddl_parser() + elif name == 'numpy': + return _import_numpy() + elif name == 'scipy_special': + return _import_scipy_special() + raise ImportError(f'Module "{name}" is not available') diff --git a/src/tarski/syntax/algebra/matrix.py b/src/tarski/syntax/algebra/matrix.py index a71d383e..6b3c8be6 100644 --- a/src/tarski/syntax/algebra/matrix.py +++ b/src/tarski/syntax/algebra/matrix.py @@ -6,7 +6,7 @@ class Matrix(Term): def __init__(self, arraylike, sort: Sort): - np = modules.import_numpy() + np = modules.numpy self.matrix = np.array(arraylike, dtype=np.dtype(object)) self._sort = sort # verify and cast diff --git a/src/tarski/syntax/arithmetic/__init__.py b/src/tarski/syntax/arithmetic/__init__.py index 625108eb..7291b2f0 100644 --- a/src/tarski/syntax/arithmetic/__init__.py +++ b/src/tarski/syntax/arithmetic/__init__.py @@ -149,7 +149,6 @@ def one(sort): def simplify(expr: Term) -> Term: - np = modules.import_numpy() if isinstance(expr, Constant): return expr elif isinstance(expr, Variable): @@ -202,7 +201,7 @@ def simplify(expr: Term) -> Term: return one(expr.sort) expr.subterms = (simplified,) return expr - elif isinstance(expr, (Matrix, np.ndarray)): + elif isinstance(expr, (Matrix, modules.numpy.ndarray)): N, M = expr.shape for i in range(N): for j in range(M): diff --git a/src/tarski/syntax/arithmetic/random.py b/src/tarski/syntax/arithmetic/random.py index bb67ad35..81656993 100644 --- a/src/tarski/syntax/arithmetic/random.py +++ b/src/tarski/syntax/arithmetic/random.py @@ -9,8 +9,7 @@ def normal(mu, sigma): try: normal_func = mu.language.get_function(bfs.NORMAL) except AttributeError: - np = modules.import_numpy() - return np.random.normal(mu, sigma) + return modules.numpy.random.normal(mu, sigma) return normal_func(mu, sigma) @@ -21,6 +20,5 @@ def gamma(shape, scale): try: gamma_func = scale.language.get_function(bfs.GAMMA) except AttributeError: - np = modules.import_numpy() - return np.random.gamma(shape, scale) + return modules.numpy.random.gamma(shape, scale) return gamma_func(shape, scale) diff --git a/src/tarski/syntax/ops.py b/src/tarski/syntax/ops.py index 0aa3ee41..f5252907 100644 --- a/src/tarski/syntax/ops.py +++ b/src/tarski/syntax/ops.py @@ -17,7 +17,7 @@ def cast_to_closest_common_numeric_ancestor(lang, lhs, rhs): if isinstance(lhs, Term) and isinstance(rhs, Term): return lhs, rhs - np = modules.import_numpy() + np = modules.numpy if isinstance(lhs, Term): if 
isinstance(rhs, np.ndarray): # lhs is scalar, rhs is matrix return lhs.language.matrix([[lhs]], lhs.sort), rhs diff --git a/tests/fol/test_interpretations.py b/tests/fol/test_interpretations.py index 3594ffe7..5ee13cb2 100644 --- a/tests/fol/test_interpretations.py +++ b/tests/fol/test_interpretations.py @@ -9,12 +9,12 @@ from tarski.evaluators.simple import evaluate from tarski.syntax import Constant, ite, symref from tarski.theories import Theory -from tarski.modules import import_scipy_special +from tarski import modules import pytest try: - sci = import_scipy_special() + sp = modules.scipy_special except ImportError: pytest.skip('Please install the "arithmetic" extra to run the full suite of tests', allow_module_level=True) @@ -230,7 +230,7 @@ def test_special_function_erf(): model.evaluator = evaluate reals = lang.Real x = lang.constant(0.5, reals) - assert model[erf(x)].symbol == sci.erf(0.5) + assert model[erf(x)].symbol == sp.erf(0.5) def test_special_function_erfc(): @@ -240,7 +240,7 @@ def test_special_function_erfc(): model.evaluator = evaluate reals = lang.Real x = lang.constant(0.5, reals) - assert model[erfc(x)].symbol == sci.erfc(0.5) + assert model[erfc(x)].symbol == sp.erfc(0.5) def test_special_function_sgn(): From d8074663e9b48f85d6e66c1973d454cd577691f0 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Fri, 11 Mar 2022 12:36:16 +0100 Subject: [PATCH 12/14] Fix unused import warnings --- src/tarski/dl/__init__.py | 35 +++++++++++++++- src/tarski/fstrips/__init__.py | 24 ++++++++++- src/tarski/fstrips/contingent/__init__.py | 4 +- src/tarski/fstrips/hybrid/__init__.py | 3 +- src/tarski/fstrips/manipulation/__init__.py | 2 + src/tarski/funcsym/__init__.py | 2 +- src/tarski/grounding/__init__.py | 7 ++++ src/tarski/io/__init__.py | 2 + src/tarski/io/fstrips.py | 2 +- src/tarski/rddl/__init__.py | 2 + src/tarski/sas/__init__.py | 2 + src/tarski/search/__init__.py | 2 + src/tarski/syntax/__init__.py | 45 ++++++++++++++++++++- src/tarski/syntax/algebra/__init__.py | 2 + src/tarski/syntax/transform/__init__.py | 15 +++++++ src/tarski/utils/__init__.py | 2 + tox.ini | 2 - 17 files changed, 143 insertions(+), 10 deletions(-) diff --git a/src/tarski/dl/__init__.py b/src/tarski/dl/__init__.py index 27d5e0af..9cfa4b27 100644 --- a/src/tarski/dl/__init__.py +++ b/src/tarski/dl/__init__.py @@ -1,7 +1,40 @@ from .concepts import Concept, PrimitiveConcept, UniversalConcept, NotConcept, ExistsConcept, ForallConcept, \ EqualConcept, AndConcept, OrConcept, EmptyConcept, NominalConcept, NullaryAtom, GoalConcept, GoalNullaryAtom from .concepts import Role, PrimitiveRole, InverseRole, StarRole, RestrictRole, CompositionRole, GoalRole -from .features import MinDistanceFeature, ConceptCardinalityFeature, EmpiricalBinaryConcept, FeatureValueChange,\ +from .features import MinDistanceFeature, ConceptCardinalityFeature, EmpiricalBinaryConcept, FeatureValueChange, \ NullaryAtomFeature, Feature from .factory import SyntacticFactory, compute_dl_vocabulary from .errors import ArityDLMismatch + +__all__ = [ + 'AndConcept', + 'ArityDLMismatch', + 'CompositionRole', + 'Concept', + 'ConceptCardinalityFeature', + 'EmpiricalBinaryConcept', + 'EmptyConcept', + 'EqualConcept', + 'ExistsConcept', + 'Feature', + 'FeatureValueChange', + 'ForallConcept', + 'GoalConcept', + 'GoalNullaryAtom', + 'GoalRole', + 'InverseRole', + 'MinDistanceFeature', + 'NominalConcept', + 'NotConcept', + 'NullaryAtom', + 'NullaryAtomFeature', + 'OrConcept', + 'PrimitiveConcept', + 'PrimitiveRole', + 'RestrictRole', + 'Role', + 
'StarRole', + 'SyntacticFactory', + 'UniversalConcept', + 'compute_dl_vocabulary', +] diff --git a/src/tarski/fstrips/__init__.py b/src/tarski/fstrips/__init__.py index f2e92368..fa01b242 100644 --- a/src/tarski/fstrips/__init__.py +++ b/src/tarski/fstrips/__init__.py @@ -2,5 +2,27 @@ from .action import Action from .derived import Derived from .fstrips import BaseEffect, SingleEffect, AddEffect, DelEffect, FunctionalEffect, IncreaseEffect, \ - LiteralEffect, UniversalEffect, ChoiceEffect, VectorisedEffect, LinearEffect, BlackBoxEffect,\ + LiteralEffect, UniversalEffect, ChoiceEffect, VectorisedEffect, LinearEffect, BlackBoxEffect, \ language, OptimizationMetric, OptimizationType + +__all__ = [ + 'Action', + 'AddEffect', + 'BaseEffect', + 'BlackBoxEffect', + 'ChoiceEffect', + 'DelEffect', + 'Derived', + 'FunctionalEffect', + 'IncreaseEffect', + 'LinearEffect', + 'LiteralEffect', + 'OptimizationMetric', + 'OptimizationType', + 'Problem', + 'SingleEffect', + 'UniversalEffect', + 'VectorisedEffect', + 'create_fstrips_problem', + 'language', +] diff --git a/src/tarski/fstrips/contingent/__init__.py b/src/tarski/fstrips/contingent/__init__.py index 427b0f2b..3b53d6e4 100644 --- a/src/tarski/fstrips/contingent/__init__.py +++ b/src/tarski/fstrips/contingent/__init__.py @@ -1,4 +1,4 @@ from .problem import ContingentProblem as Problem -from .. action import Action from .sensor import Sensor -from . import errors + +__all__ = ['Problem', 'Sensor'] diff --git a/src/tarski/fstrips/hybrid/__init__.py b/src/tarski/fstrips/hybrid/__init__.py index eb92efcd..7251944c 100644 --- a/src/tarski/fstrips/hybrid/__init__.py +++ b/src/tarski/fstrips/hybrid/__init__.py @@ -1,4 +1,5 @@ from . problem import HybridProblem as Problem -from .. action import Action from . reaction import Reaction from . differential_constraints import DifferentialConstraint + +__all__ = ['Problem', 'Reaction', 'DifferentialConstraint'] diff --git a/src/tarski/fstrips/manipulation/__init__.py b/src/tarski/fstrips/manipulation/__init__.py index 068cd50e..bf825092 100644 --- a/src/tarski/fstrips/manipulation/__init__.py +++ b/src/tarski/fstrips/manipulation/__init__.py @@ -1 +1,3 @@ from .simplify import Simplify + +__all__ = ['Simplify'] diff --git a/src/tarski/funcsym/__init__.py b/src/tarski/funcsym/__init__.py index a7956d01..e6206c05 100644 --- a/src/tarski/funcsym/__init__.py +++ b/src/tarski/funcsym/__init__.py @@ -6,7 +6,7 @@ def impl(symbol): if symbol in {"erf", "erfc"}: return { - "erf": modules.scipy_special.erf, # lazily import scipy.special + "erf": modules.scipy_special.erf, # lazily import scipy.special "erfc": modules.scipy_special.erfc, }.get(symbol) diff --git a/src/tarski/grounding/__init__.py b/src/tarski/grounding/__init__.py index c8cc835c..660bfcaf 100644 --- a/src/tarski/grounding/__init__.py +++ b/src/tarski/grounding/__init__.py @@ -1,3 +1,10 @@ from .naive_grounding import ProblemGrounding, create_all_possible_state_variables, \ NaiveGroundingStrategy from .lp_grounding import LPGroundingStrategy + +__all__ = [ + 'create_all_possible_state_variables', + 'NaiveGroundingStrategy', + 'LPGroundingStrategy', + 'ProblemGrounding' +] diff --git a/src/tarski/io/__init__.py b/src/tarski/io/__init__.py index 4250741b..f900bcab 100644 --- a/src/tarski/io/__init__.py +++ b/src/tarski/io/__init__.py @@ -3,3 +3,5 @@ # Just a shortcut, turns out they're both the same! 
:-) PDDLReader = FstripsReader + +__all__ = ['FstripsReader', 'FstripsWriter', 'PDDLReader', 'find_domain_filename'] diff --git a/src/tarski/io/fstrips.py b/src/tarski/io/fstrips.py index b96b20a4..033d945f 100644 --- a/src/tarski/io/fstrips.py +++ b/src/tarski/io/fstrips.py @@ -18,7 +18,7 @@ # Leave the next import so that it can be imported from the outside without warnings of importing a private module # pylint: disable=unused-import -from ._fstrips.reader import ParsingError +from ._fstrips.reader import ParsingError # noqa: F401 class FstripsReader: diff --git a/src/tarski/rddl/__init__.py b/src/tarski/rddl/__init__.py index 280e6a2a..54a602d9 100644 --- a/src/tarski/rddl/__init__.py +++ b/src/tarski/rddl/__init__.py @@ -1 +1,3 @@ from .task import Task + +__all__ = ['Task'] diff --git a/src/tarski/sas/__init__.py b/src/tarski/sas/__init__.py index 703ae145..de521b53 100644 --- a/src/tarski/sas/__init__.py +++ b/src/tarski/sas/__init__.py @@ -2,3 +2,5 @@ from .temporal import TemporalAction from .variable import Variable from .helper import make_domain + +__all__ = ['Effect', 'Action', 'TemporalAction', 'Variable', 'make_domain'] diff --git a/src/tarski/search/__init__.py b/src/tarski/search/__init__.py index 99939e59..30f5dc0e 100644 --- a/src/tarski/search/__init__.py +++ b/src/tarski/search/__init__.py @@ -1,2 +1,4 @@ from .model import SearchModel, GroundForwardSearchModel from .blind import BreadthFirstSearch + +__all__ = ['SearchModel', 'GroundForwardSearchModel', 'BreadthFirstSearch'] diff --git a/src/tarski/syntax/__init__.py b/src/tarski/syntax/__init__.py index 668e8731..20a15356 100644 --- a/src/tarski/syntax/__init__.py +++ b/src/tarski/syntax/__init__.py @@ -3,9 +3,52 @@ from .sorts import Sort, Interval, inclusion_closure from .terms import Term, Constant, Variable, CompoundTerm, IfThenElse, ite, AggregateCompoundTerm from .util import termlists_are_equal, termlist_hash -from .formulas import land, lor, neg, implies, forall, exists, equiv, Connective, Atom, Formula,\ +from .formulas import land, lor, neg, implies, forall, exists, equiv, Connective, Atom, Formula, \ CompoundFormula, QuantifiedFormula, Tautology, Contradiction, top, bot, Quantifier, VariableBinding, \ is_neg, is_and, is_or from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol from .symrefs import symref from .transform.substitutions import create_substitution, substitute_expression + +__all__ = [ + 'AggregateCompoundTerm', + 'Atom', + 'BuiltinFunctionSymbol', + 'BuiltinPredicateSymbol', + 'CompoundFormula', + 'CompoundTerm', + 'Connective', + 'Constant', + 'Contradiction', + 'Formula', + 'Function', + 'IfThenElse', + 'Interval', + 'Predicate', + 'QuantifiedFormula', + 'Quantifier', + 'Sort', + 'Tautology', + 'Term', + 'Variable', + 'VariableBinding', + 'bot', + 'create_substitution', + 'equiv', + 'exists', + 'forall', + 'implies', + 'inclusion_closure', + 'is_and', + 'is_neg', + 'is_or', + 'ite', + 'land', + 'lor', + 'neg', + 'substitute_expression', + 'symref', + 'termlist_hash', + 'termlists_are_equal', + 'top', +] diff --git a/src/tarski/syntax/algebra/__init__.py b/src/tarski/syntax/algebra/__init__.py index 4200d222..bc589b5a 100644 --- a/src/tarski/syntax/algebra/__init__.py +++ b/src/tarski/syntax/algebra/__init__.py @@ -1 +1,3 @@ from .matrix import Matrix + +__all__ = ['Matrix'] diff --git a/src/tarski/syntax/transform/__init__.py b/src/tarski/syntax/transform/__init__.py index 3242df1c..a42f689d 100644 --- a/src/tarski/syntax/transform/__init__.py +++ 
b/src/tarski/syntax/transform/__init__.py @@ -3,3 +3,18 @@ from .prenex import PrenexTransformation, to_prenex_negation_normal_form from .quantifier_elimination import QuantifierElimination, QuantifierEliminationMode, remove_quantifiers from .neg_builtin import NegatedBuiltinAbsorption + +__all__ = [ + 'CNFTransformation', + 'NNFTransformation', + 'NegatedBuiltinAbsorption', + 'PrenexTransformation', + 'QuantifierElimination', + 'QuantifierEliminationMode', + 'errors', + 'remove_quantifiers', + 'substitutions', + 'to_conjunctive_normal_form', + 'to_negation_normal_form', + 'to_prenex_negation_normal_form' +] diff --git a/src/tarski/utils/__init__.py b/src/tarski/utils/__init__.py index 73e22db9..2dfe5a1c 100644 --- a/src/tarski/utils/__init__.py +++ b/src/tarski/utils/__init__.py @@ -1 +1,3 @@ from .helpers import parse_model + +__all__ = ['parse_model'] diff --git a/tox.ini b/tox.ini index c765785e..3593b741 100644 --- a/tox.ini +++ b/tox.ini @@ -94,8 +94,6 @@ precision = 2 [flake8] max-line-length = 120 ignore = - # Let's deal with "unused imports" once we move to Python 3.7 and can do lazy importing - F401, # The preferred style now is as governed by W504, see https://www.flake8rules.com/rules/W503.html W503, From 33e22a63f5f7b4c3f2f32aa0d83e15af8ac9683c Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Fri, 11 Mar 2022 12:40:27 +0100 Subject: [PATCH 13/14] Run isort --- src/tarski/__init__.py | 10 ++++---- src/tarski/analysis/csp.py | 2 +- src/tarski/analysis/csp_schema.py | 12 +++++---- src/tarski/benchmarks/blocksworld.py | 4 +-- src/tarski/benchmarks/counters.py | 1 - src/tarski/dl/__init__.py | 16 +++++++----- src/tarski/dl/concepts.py | 2 +- src/tarski/dl/factory.py | 8 +++--- src/tarski/dl/features.py | 2 +- src/tarski/evaluators/simple.py | 10 +++++--- src/tarski/fol.py | 7 +++--- src/tarski/fstrips/__init__.py | 10 +++++--- src/tarski/fstrips/contingent/errors.py | 2 +- src/tarski/fstrips/contingent/problem.py | 3 ++- src/tarski/fstrips/contingent/sensor.py | 2 +- src/tarski/fstrips/errors.py | 2 +- src/tarski/fstrips/fstrips.py | 4 +-- src/tarski/fstrips/hybrid/__init__.py | 6 ++--- src/tarski/fstrips/hybrid/errors.py | 2 +- src/tarski/fstrips/hybrid/problem.py | 6 ++--- src/tarski/fstrips/manipulation/simplify.py | 16 ++++++------ src/tarski/fstrips/ops.py | 8 +++--- src/tarski/fstrips/problem.py | 7 +++--- src/tarski/fstrips/representation.py | 20 +++++++++------ src/tarski/fstrips/visitors.py | 4 +-- src/tarski/fstrips/walker.py | 15 +++++++---- src/tarski/grounding/__init__.py | 4 +-- src/tarski/grounding/common.py | 3 ++- src/tarski/grounding/lp_grounding.py | 9 ++++--- src/tarski/grounding/naive/constraints.py | 7 +++--- src/tarski/grounding/naive_grounding.py | 11 ++++---- src/tarski/grounding/ops.py | 2 +- src/tarski/io/_fstrips/common.py | 4 +-- src/tarski/io/_fstrips/parser/lexer.py | 5 ++-- src/tarski/io/_fstrips/parser/listener.py | 1 + src/tarski/io/_fstrips/parser/parser.py | 6 +++-- src/tarski/io/_fstrips/parser/visitor.py | 1 + src/tarski/io/_fstrips/reader.py | 17 +++++++------ src/tarski/io/fstrips.py | 25 +++++++++---------- src/tarski/io/pddl/instance.py | 10 +++----- src/tarski/io/pddl/lexer.py | 3 ++- src/tarski/io/pddl/parser.py | 7 +++--- src/tarski/io/rddl.py | 23 ++++++++++------- src/tarski/model.py | 8 +++--- src/tarski/modules.py | 3 ++- src/tarski/ndl/temporal.py | 3 ++- src/tarski/rddl/task.py | 2 +- src/tarski/reachability/__init__.py | 2 +- src/tarski/reachability/asp.py | 15 ++++++----- src/tarski/reachability/clingo_wrapper.py | 9 ++++--- 
src/tarski/sas/__init__.py | 4 +-- src/tarski/sas/action.py | 1 + src/tarski/sas/helper.py | 3 ++- src/tarski/sas/variable.py | 5 ++-- src/tarski/search/__init__.py | 2 +- src/tarski/search/model.py | 2 +- src/tarski/search/operations.py | 2 +- src/tarski/syntax/__init__.py | 16 ++++++------ src/tarski/syntax/algebra/matrix.py | 4 +-- src/tarski/syntax/arithmetic/__init__.py | 13 +++++----- src/tarski/syntax/arithmetic/random.py | 2 +- src/tarski/syntax/builtins.py | 6 +++-- src/tarski/syntax/factory.py | 2 +- src/tarski/syntax/formulas.py | 4 +-- src/tarski/syntax/function.py | 3 ++- src/tarski/syntax/ops.py | 8 +++--- src/tarski/syntax/predicate.py | 3 ++- src/tarski/syntax/sorts.py | 9 ++++--- src/tarski/syntax/temporal/ltl.py | 3 ++- src/tarski/syntax/terms.py | 6 ++--- src/tarski/syntax/transform/__init__.py | 8 +++--- .../syntax/transform/action_grounding.py | 4 +-- src/tarski/syntax/transform/cnf.py | 1 - src/tarski/syntax/transform/neg_builtin.py | 2 +- src/tarski/syntax/transform/nnf.py | 5 ++-- src/tarski/syntax/transform/prenex.py | 6 ++--- .../transform/quantifier_elimination.py | 8 +++--- .../syntax/transform/simplifications.py | 3 +-- src/tarski/syntax/visitors.py | 3 ++- src/tarski/syntax/walker.py | 6 +++-- src/tarski/theories.py | 11 ++++---- src/tarski/utils/algorithms.py | 2 +- src/tarski/utils/command.py | 2 +- tests/analysis/test_csp.py | 3 ++- tests/benchmarks/test_benchmarks.py | 3 ++- tests/common/gridworld.py | 2 +- tests/common/parcprinter.py | 4 +-- tests/dl/test_concepts.py | 6 +++-- tests/fol/test_interpretations.py | 23 ++++++++++++----- tests/fol/test_sorts.py | 3 ++- tests/fol/test_syntactic_analysis.py | 6 +++-- tests/fol/test_syntax.py | 14 ++++++----- tests/fstrips/contingent/localize.py | 1 - tests/fstrips/contingent/test_sensors.py | 1 - tests/fstrips/hybrid/tasks.py | 4 +-- tests/fstrips/hybrid/test_differential.py | 1 - tests/fstrips/hybrid/test_reactions.py | 3 +-- tests/fstrips/test_fstrips_operations.py | 4 +-- tests/fstrips/test_problem_grounding.py | 13 +++++----- tests/fstrips/test_representation.py | 25 ++++++++++++------- tests/fstrips/test_simplify.py | 5 ++-- tests/fstrips/test_symbol_classification.py | 5 ++-- tests/fstrips/test_walker.py | 1 - tests/grounding/test_lp_grounding.py | 6 +++-- tests/grounding/test_naive_grounding.py | 22 +++++++++------- tests/io/test_builtin_domains_parsing.py | 1 - tests/io/test_fstrips_full_domains_parsing.py | 10 +++++--- tests/io/test_fstrips_parsing.py | 8 +++--- tests/io/test_fstrips_writer.py | 15 ++++++----- tests/io/test_pddl_parsing.py | 3 ++- tests/io/test_rddl_parsing.py | 2 +- tests/io/test_rddl_writer.py | 8 +++--- tests/ndl/test_temporal.py | 8 +++--- tests/reachability/test_reachability_lp.py | 6 +++-- tests/sas/test_action.py | 8 +++--- tests/search/test_search_models.py | 8 +++--- .../transforms/test_syntax_transformations.py | 15 +++++------ 117 files changed, 441 insertions(+), 326 deletions(-) diff --git a/src/tarski/__init__.py b/src/tarski/__init__.py index ed8aa708..6e2680d6 100644 --- a/src/tarski/__init__.py +++ b/src/tarski/__init__.py @@ -1,13 +1,13 @@ import logging import sys -from .version import __version__, __version_info__ +from . 
import fstrips +from .errors import LanguageError from .fol import FirstOrderLanguage -from .theories import language +from .syntax import Constant, Formula, Function, Predicate, Term, Variable from .theories import Theory as Theories -from .syntax import Function, Predicate, Formula, Term, Constant, Variable -from .errors import LanguageError -from . import fstrips +from .theories import language +from .version import __version__, __version_info__ logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/src/tarski/analysis/csp.py b/src/tarski/analysis/csp.py index 1cfa994a..6c7677e7 100644 --- a/src/tarski/analysis/csp.py +++ b/src/tarski/analysis/csp.py @@ -5,7 +5,7 @@ from collections import defaultdict from ..errors import TarskiError -from ..syntax import CompoundFormula, Atom, Connective, Variable, Constant +from ..syntax import Atom, CompoundFormula, Connective, Constant, Variable class WrongFormalismError(TarskiError): diff --git a/src/tarski/analysis/csp_schema.py b/src/tarski/analysis/csp_schema.py index 2dedeeaa..ecb565b2 100644 --- a/src/tarski/analysis/csp_schema.py +++ b/src/tarski/analysis/csp_schema.py @@ -2,13 +2,15 @@ from enum import Enum from pathlib import Path -from ..fstrips.manipulation import Simplify from ..errors import TarskiError -from ..fstrips.representation import is_conjunction_of_literals, has_state_variable_shape, \ - collect_effect_free_parameters +from ..fstrips.manipulation import Simplify +from ..fstrips.representation import (collect_effect_free_parameters, + has_state_variable_shape, + is_conjunction_of_literals) from ..grounding.common import StateVariableLite -from ..syntax import QuantifiedFormula, Quantifier, Contradiction, CompoundFormula, Atom, CompoundTerm, \ - is_neg, symref, Constant, Variable, Tautology, top +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Constant, + Contradiction, QuantifiedFormula, Quantifier, Tautology, + Variable, is_neg, symref, top) from ..syntax.ops import collect_unique_nodes, flatten from ..syntax.transform import to_prenex_negation_normal_form diff --git a/src/tarski/benchmarks/blocksworld.py b/src/tarski/benchmarks/blocksworld.py index 49741b4e..0e61f155 100644 --- a/src/tarski/benchmarks/blocksworld.py +++ b/src/tarski/benchmarks/blocksworld.py @@ -5,12 +5,12 @@ import tarski as tsk from tarski import fstrips as fs -from tarski.fstrips import DelEffect, AddEffect +from tarski.fstrips import AddEffect, DelEffect + from ..fstrips import create_fstrips_problem from ..syntax import land from ..theories import Theory - BASE_DOMAIN_NAME = "blocksworld" diff --git a/src/tarski/benchmarks/counters.py b/src/tarski/benchmarks/counters.py index bdb4ca49..706e0d33 100644 --- a/src/tarski/benchmarks/counters.py +++ b/src/tarski/benchmarks/counters.py @@ -5,7 +5,6 @@ from ..syntax import land from ..theories import Theory - BASE_DOMAIN_NAME = "counters-fn" diff --git a/src/tarski/dl/__init__.py b/src/tarski/dl/__init__.py index 9cfa4b27..dc52934a 100644 --- a/src/tarski/dl/__init__.py +++ b/src/tarski/dl/__init__.py @@ -1,10 +1,14 @@ -from .concepts import Concept, PrimitiveConcept, UniversalConcept, NotConcept, ExistsConcept, ForallConcept, \ - EqualConcept, AndConcept, OrConcept, EmptyConcept, NominalConcept, NullaryAtom, GoalConcept, GoalNullaryAtom -from .concepts import Role, PrimitiveRole, InverseRole, StarRole, RestrictRole, CompositionRole, GoalRole -from .features import MinDistanceFeature, ConceptCardinalityFeature, EmpiricalBinaryConcept, FeatureValueChange, \ - NullaryAtomFeature, 
Feature -from .factory import SyntacticFactory, compute_dl_vocabulary +from .concepts import (AndConcept, CompositionRole, Concept, EmptyConcept, + EqualConcept, ExistsConcept, ForallConcept, GoalConcept, + GoalNullaryAtom, GoalRole, InverseRole, NominalConcept, + NotConcept, NullaryAtom, OrConcept, PrimitiveConcept, + PrimitiveRole, RestrictRole, Role, StarRole, + UniversalConcept) from .errors import ArityDLMismatch +from .factory import SyntacticFactory, compute_dl_vocabulary +from .features import (ConceptCardinalityFeature, EmpiricalBinaryConcept, + Feature, FeatureValueChange, MinDistanceFeature, + NullaryAtomFeature) __all__ = [ 'AndConcept', diff --git a/src/tarski/dl/concepts.py b/src/tarski/dl/concepts.py index 9f46d178..e803c5e9 100644 --- a/src/tarski/dl/concepts.py +++ b/src/tarski/dl/concepts.py @@ -1,7 +1,7 @@ """ """ -from ..syntax import Predicate, Function, Sort +from ..syntax import Function, Predicate, Sort from ..utils.algorithms import transitive_closure from ..utils.hashing import consistent_hash from .errors import ArityDLMismatch diff --git a/src/tarski/dl/factory.py b/src/tarski/dl/factory.py index b0295483..0a91935d 100644 --- a/src/tarski/dl/factory.py +++ b/src/tarski/dl/factory.py @@ -2,9 +2,11 @@ from .. import FirstOrderLanguage from ..syntax import builtins -from . import Concept, Role, UniversalConcept, PrimitiveConcept, NotConcept, ExistsConcept, ForallConcept, \ - EqualConcept, PrimitiveRole, RestrictRole, AndConcept, EmptyConcept, CompositionRole, NominalConcept, NullaryAtom, \ - GoalNullaryAtom, GoalConcept, GoalRole, OrConcept +from . import (AndConcept, CompositionRole, Concept, EmptyConcept, + EqualConcept, ExistsConcept, ForallConcept, GoalConcept, + GoalNullaryAtom, GoalRole, NominalConcept, NotConcept, + NullaryAtom, OrConcept, PrimitiveConcept, PrimitiveRole, + RestrictRole, Role, UniversalConcept) def filter_subnodes(elem, t): diff --git a/src/tarski/dl/features.py b/src/tarski/dl/features.py index 4ffa5921..5f8cfe7c 100644 --- a/src/tarski/dl/features.py +++ b/src/tarski/dl/features.py @@ -3,9 +3,9 @@ """ from enum import Enum -from .concepts import Concept, Role, NullaryAtom from ..utils.algorithms import compute_min_distance from ..utils.hashing import consistent_hash +from .concepts import Concept, NullaryAtom, Role class FeatureValueChange(Enum): diff --git a/src/tarski/evaluators/simple.py b/src/tarski/evaluators/simple.py index 2901b37d..f933335c 100644 --- a/src/tarski/evaluators/simple.py +++ b/src/tarski/evaluators/simple.py @@ -1,12 +1,14 @@ import operator from typing import List -from .. import funcsym from .. import errors as err -from ..syntax import ops, Connective, Atom, Formula, CompoundFormula, QuantifiedFormula, builtins, Variable, \ - Constant, CompoundTerm, Tautology, Contradiction, IfThenElse, AggregateCompoundTerm, Term -from ..syntax.algebra import Matrix +from .. 
import funcsym from ..model import Model +from ..syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + CompoundTerm, Connective, Constant, Contradiction, + Formula, IfThenElse, QuantifiedFormula, Tautology, Term, + Variable, builtins, ops) +from ..syntax.algebra import Matrix # TODO We will need to extend this so that the interpretation depends on a certain, given sigma of values to diff --git a/src/tarski/fol.py b/src/tarski/fol.py index e13390a0..3b3e2a7b 100644 --- a/src/tarski/fol.py +++ b/src/tarski/fol.py @@ -1,13 +1,14 @@ import copy import itertools -from collections import defaultdict, OrderedDict +from collections import OrderedDict, defaultdict from typing import Union, cast from . import errors as err +from . import modules from .errors import UndefinedElement -from .syntax import Function, Constant, Variable, Sort, inclusion_closure, Predicate, Interval +from .syntax import (Constant, Function, Interval, Predicate, Sort, Variable, + inclusion_closure) from .syntax.algebra import Matrix -from . import modules class FirstOrderLanguage: diff --git a/src/tarski/fstrips/__init__.py b/src/tarski/fstrips/__init__.py index fa01b242..770ce035 100644 --- a/src/tarski/fstrips/__init__.py +++ b/src/tarski/fstrips/__init__.py @@ -1,9 +1,11 @@ -from .problem import Problem, create_fstrips_problem from .action import Action from .derived import Derived -from .fstrips import BaseEffect, SingleEffect, AddEffect, DelEffect, FunctionalEffect, IncreaseEffect, \ - LiteralEffect, UniversalEffect, ChoiceEffect, VectorisedEffect, LinearEffect, BlackBoxEffect, \ - language, OptimizationMetric, OptimizationType +from .fstrips import (AddEffect, BaseEffect, BlackBoxEffect, ChoiceEffect, + DelEffect, FunctionalEffect, IncreaseEffect, + LinearEffect, LiteralEffect, OptimizationMetric, + OptimizationType, SingleEffect, UniversalEffect, + VectorisedEffect, language) +from .problem import Problem, create_fstrips_problem __all__ = [ 'Action', diff --git a/src/tarski/fstrips/contingent/errors.py b/src/tarski/fstrips/contingent/errors.py index b40f939c..4337b89f 100644 --- a/src/tarski/fstrips/contingent/errors.py +++ b/src/tarski/fstrips/contingent/errors.py @@ -1,4 +1,4 @@ -from ...errors import TarskiError, DuplicateDefinition, UndefinedElement +from ...errors import DuplicateDefinition, TarskiError, UndefinedElement class ObservationExpressivenessMismatch(TarskiError): diff --git a/src/tarski/fstrips/contingent/problem.py b/src/tarski/fstrips/contingent/problem.py index 6f8d9d9d..eacdec29 100644 --- a/src/tarski/fstrips/contingent/problem.py +++ b/src/tarski/fstrips/contingent/problem.py @@ -1,7 +1,8 @@ from collections import OrderedDict + from ..problem import Problem -from .sensor import Sensor from . import errors as err +from .sensor import Sensor class ContingentProblem(Problem): diff --git a/src/tarski/fstrips/contingent/sensor.py b/src/tarski/fstrips/contingent/sensor.py index 27c61a04..a82054c8 100644 --- a/src/tarski/fstrips/contingent/sensor.py +++ b/src/tarski/fstrips/contingent/sensor.py @@ -1,4 +1,4 @@ -from ...syntax import CompoundFormula, Atom, Connective +from ...syntax import Atom, CompoundFormula, Connective from . 
import errors as err diff --git a/src/tarski/fstrips/errors.py b/src/tarski/fstrips/errors.py index 9c21bbee..6d045dc1 100644 --- a/src/tarski/fstrips/errors.py +++ b/src/tarski/fstrips/errors.py @@ -1,4 +1,4 @@ -from ..errors import TarskiError, DuplicateDefinition, UndefinedElement +from ..errors import DuplicateDefinition, TarskiError, UndefinedElement class IncompleteProblemError(TarskiError): diff --git a/src/tarski/fstrips/fstrips.py b/src/tarski/fstrips/fstrips.py index 9cde04d5..6b52c7bf 100644 --- a/src/tarski/fstrips/fstrips.py +++ b/src/tarski/fstrips/fstrips.py @@ -1,8 +1,8 @@ from enum import Enum -from typing import Union, List, Optional, Callable, Any +from typing import Any, Callable, List, Optional, Union -from ..syntax import CompoundTerm, Term, symref, top from .. import theories as ths +from ..syntax import CompoundTerm, Term, symref, top from .errors import InvalidEffectError diff --git a/src/tarski/fstrips/hybrid/__init__.py b/src/tarski/fstrips/hybrid/__init__.py index 7251944c..8b822261 100644 --- a/src/tarski/fstrips/hybrid/__init__.py +++ b/src/tarski/fstrips/hybrid/__init__.py @@ -1,5 +1,5 @@ -from . problem import HybridProblem as Problem -from . reaction import Reaction -from . differential_constraints import DifferentialConstraint +from .differential_constraints import DifferentialConstraint +from .problem import HybridProblem as Problem +from .reaction import Reaction __all__ = ['Problem', 'Reaction', 'DifferentialConstraint'] diff --git a/src/tarski/fstrips/hybrid/errors.py b/src/tarski/fstrips/hybrid/errors.py index 77834f0e..542a02cb 100644 --- a/src/tarski/fstrips/hybrid/errors.py +++ b/src/tarski/fstrips/hybrid/errors.py @@ -1,4 +1,4 @@ -from ... errors import DuplicateDefinition, UndefinedElement, SyntacticError +from ...errors import DuplicateDefinition, SyntacticError, UndefinedElement class DuplicateReactionDefinition(DuplicateDefinition): diff --git a/src/tarski/fstrips/hybrid/problem.py b/src/tarski/fstrips/hybrid/problem.py index ebfcf458..88b24ab3 100644 --- a/src/tarski/fstrips/hybrid/problem.py +++ b/src/tarski/fstrips/hybrid/problem.py @@ -1,10 +1,10 @@ from collections import OrderedDict +from .. import fstrips as fs from ..problem import Problem -from .reaction import Reaction -from .differential_constraints import DifferentialConstraint from . import errors as err -from .. 
import fstrips as fs +from .differential_constraints import DifferentialConstraint +from .reaction import Reaction class HybridProblem(Problem): diff --git a/src/tarski/fstrips/manipulation/simplify.py b/src/tarski/fstrips/manipulation/simplify.py index 89f7b7ba..fd066fad 100644 --- a/src/tarski/fstrips/manipulation/simplify.py +++ b/src/tarski/fstrips/manipulation/simplify.py @@ -1,18 +1,20 @@ import copy from functools import singledispatchmethod -from ..fstrips import AddEffect, DelEffect, UniversalEffect, FunctionalEffect -from ..ops import collect_all_symbols, compute_number_potential_groundings from ...evaluators.simple import evaluate from ...grounding.ops import approximate_symbol_fluency -from ...syntax.terms import Constant, Variable, CompoundTerm -from ...syntax.formulas import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction, Connective, is_neg, \ - Quantifier, unwrap_conjunction_or_atom, is_eq_atom, land, exists +from ...syntax import symref +from ...syntax.formulas import (Atom, CompoundFormula, Connective, + Contradiction, QuantifiedFormula, Quantifier, + Tautology, exists, is_eq_atom, is_neg, land, + unwrap_conjunction_or_atom) +from ...syntax.ops import flatten +from ...syntax.terms import CompoundTerm, Constant, Variable from ...syntax.transform.substitutions import substitute_expression from ...syntax.util import get_symbols from ...syntax.walker import FOLWalker -from ...syntax.ops import flatten -from ...syntax import symref +from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect +from ..ops import collect_all_symbols, compute_number_potential_groundings def bool_to_expr(val): diff --git a/src/tarski/fstrips/ops.py b/src/tarski/fstrips/ops.py index 8ace57ee..e251e120 100644 --- a/src/tarski/fstrips/ops.py +++ b/src/tarski/fstrips/ops.py @@ -1,12 +1,12 @@ import operator -from functools import singledispatchmethod, reduce +from functools import reduce, singledispatchmethod from typing import Set, Union -from .walker import ProblemWalker -from ..syntax import Predicate, Function, CompoundTerm, Atom -from .problem import Problem +from ..syntax import Atom, CompoundTerm, Function, Predicate from . import fstrips as fs from .derived import Derived +from .problem import Problem +from .walker import ProblemWalker def collect_all_symbols(problem: Problem, include_builtin=False) -> Set[Union[Predicate, Function]]: diff --git a/src/tarski/fstrips/problem.py b/src/tarski/fstrips/problem.py index ba8759f6..c112d051 100644 --- a/src/tarski/fstrips/problem.py +++ b/src/tarski/fstrips/problem.py @@ -1,10 +1,10 @@ from collections import OrderedDict from .. import model +from . import errors as err +from . import fstrips as fs from .action import Action from .derived import Derived -from . import fstrips as fs -from . 
import errors as err class Problem: @@ -104,7 +104,8 @@ def create_fstrips_problem(language, problem_name=None, domain_name=None, evalua problem.language = language if evaluator is None: - from tarski.evaluators.simple import evaluate as evaluator # pylint: disable=import-outside-toplevel + from tarski.evaluators.simple import \ + evaluate as evaluator # pylint: disable=import-outside-toplevel problem.init = model.create(language, evaluator) return problem diff --git a/src/tarski/fstrips/representation.py b/src/tarski/fstrips/representation.py index aa928226..3ab03031 100644 --- a/src/tarski/fstrips/representation.py +++ b/src/tarski/fstrips/representation.py @@ -1,19 +1,23 @@ import copy -from typing import Set, Union, Tuple, Optional, List +from typing import List, Optional, Set, Tuple, Union from ..errors import TarskiError -from .problem import Problem -from . import fstrips as fs -from ..syntax import Formula, CompoundTerm, Atom, CompoundFormula, QuantifiedFormula, is_and, is_neg, exists, symref,\ - VariableBinding, Constant, Tautology, land, Term -from ..syntax.ops import collect_unique_nodes, flatten, free_variables, all_variables +from ..fstrips import (AddEffect, BaseEffect, DelEffect, FunctionalEffect, + LiteralEffect, SingleEffect, UniversalEffect) +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Constant, Formula, + QuantifiedFormula, Tautology, Term, VariableBinding, + exists, is_and, is_neg, land, symref) +from ..syntax.ops import (all_variables, collect_unique_nodes, flatten, + free_variables) from ..syntax.sorts import compute_signature_bindings from ..syntax.symrefs import TermReference from ..syntax.transform.substitutions import enumerate_substitutions -from ..syntax.transform.substitutions import substitute_expression as fol_substitute_expression +from ..syntax.transform.substitutions import \ + substitute_expression as fol_substitute_expression from ..syntax.util import get_symbols -from ..fstrips import AddEffect, DelEffect, LiteralEffect, FunctionalEffect, UniversalEffect, BaseEffect, SingleEffect +from . import fstrips as fs from .action import Action +from .problem import Problem class RepresentationError(TarskiError): diff --git a/src/tarski/fstrips/visitors.py b/src/tarski/fstrips/visitors.py index 2b946219..809b1137 100644 --- a/src/tarski/fstrips/visitors.py +++ b/src/tarski/fstrips/visitors.py @@ -4,11 +4,11 @@ """ from typing import Set +from ..syntax import symref +from ..syntax.formulas import Atom, CompoundFormula, QuantifiedFormula from ..syntax.symrefs import TermReference from ..syntax.temporal import ltl -from ..syntax.formulas import CompoundFormula, Atom, QuantifiedFormula from ..syntax.terms import CompoundTerm -from ..syntax import symref class FluentHeuristic: diff --git a/src/tarski/fstrips/walker.py b/src/tarski/fstrips/walker.py index 777aca2b..075690ee 100644 --- a/src/tarski/fstrips/walker.py +++ b/src/tarski/fstrips/walker.py @@ -61,8 +61,11 @@ def _donothing(self, node): def run(self, expression, inplace=True): # Avoiding circular references: - from . import Action, BaseEffect, Problem # pylint: disable=import-outside-toplevel - from ..syntax import Formula, Term # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Formula, Term) + from . 
import (Action, # pylint: disable=import-outside-toplevel + BaseEffect, Problem) + # Simply dispatch according to type expression = expression if inplace else copy.deepcopy(expression) if isinstance(expression, (Formula, Term)): @@ -95,7 +98,8 @@ def visit_action(self, node, inplace=False): return node def visit_effect(self, effect, inplace=True): - from . import AddEffect, DelEffect, UniversalEffect, FunctionalEffect # pylint: disable=import-outside-toplevel + from . import (AddEffect, # pylint: disable=import-outside-toplevel + DelEffect, FunctionalEffect, UniversalEffect) effect = effect if inplace else copy.deepcopy(effect) if isinstance(effect, (AddEffect, DelEffect)): @@ -116,8 +120,9 @@ def visit_effect(self, effect, inplace=True): return self.visit(effect) def visit_expression(self, node, inplace=True): - from ..syntax import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction, Constant, Variable,\ - CompoundTerm, IfThenElse # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Atom, CompoundFormula, CompoundTerm, Constant, Contradiction, + IfThenElse, QuantifiedFormula, Tautology, Variable) node = node if inplace else copy.deepcopy(node) if isinstance(node, (Variable, Constant, Contradiction, Tautology)): diff --git a/src/tarski/grounding/__init__.py b/src/tarski/grounding/__init__.py index 660bfcaf..f277e8c6 100644 --- a/src/tarski/grounding/__init__.py +++ b/src/tarski/grounding/__init__.py @@ -1,6 +1,6 @@ -from .naive_grounding import ProblemGrounding, create_all_possible_state_variables, \ - NaiveGroundingStrategy from .lp_grounding import LPGroundingStrategy +from .naive_grounding import (NaiveGroundingStrategy, ProblemGrounding, + create_all_possible_state_variables) __all__ = [ 'create_all_possible_state_variables', diff --git a/src/tarski/grounding/common.py b/src/tarski/grounding/common.py index ffbd50ce..77234d7a 100644 --- a/src/tarski/grounding/common.py +++ b/src/tarski/grounding/common.py @@ -1,5 +1,6 @@ from ..errors import TarskiError -from ..syntax import Predicate, Function, Constant, termlists_are_equal, termlist_hash +from ..syntax import (Constant, Function, Predicate, termlist_hash, + termlists_are_equal) class StateVariableLite: diff --git a/src/tarski/grounding/lp_grounding.py b/src/tarski/grounding/lp_grounding.py index 0deae072..d7a5804b 100644 --- a/src/tarski/grounding/lp_grounding.py +++ b/src/tarski/grounding/lp_grounding.py @@ -1,13 +1,13 @@ """ Classes and methods related to the Logic-Program based grounding strategy of planning problems. 
""" -from ..utils.command import silentremove from ..grounding.ops import approximate_symbol_fluency -from ..reachability import create_reachability_lp, run_clingo, parse_model +from ..reachability import create_reachability_lp, parse_model, run_clingo from ..reachability.asp import GOAL -from .errors import ReachabilityLPUnsolvable from ..util import SymbolIndex +from ..utils.command import silentremove from .common import StateVariableLite +from .errors import ReachabilityLPUnsolvable class LPGroundingStrategy: @@ -89,7 +89,8 @@ def compute_action_groundings(problem, include_variable_inequalities=False): def ground_problem_schemas_into_plain_operators(problem, include_variable_inequalities=False): # pylint: disable=import-outside-toplevel - from ..syntax.transform.action_grounding import ground_schema_into_plain_operator_from_grounding + from ..syntax.transform.action_grounding import \ + ground_schema_into_plain_operator_from_grounding action_groundings = compute_action_groundings(problem, include_variable_inequalities) operators = [] for action_name, groundings in action_groundings.items(): diff --git a/src/tarski/grounding/naive/constraints.py b/src/tarski/grounding/naive/constraints.py index c7fe7052..cf0f48fe 100644 --- a/src/tarski/grounding/naive/constraints.py +++ b/src/tarski/grounding/naive/constraints.py @@ -1,9 +1,10 @@ import itertools -from ...syntax import QuantifiedFormula, Quantifier, create_substitution, substitute_expression -from ...syntax.transform import NegatedBuiltinAbsorption, CNFTransformation,\ - QuantifierEliminationMode, remove_quantifiers +from ...syntax import (QuantifiedFormula, Quantifier, create_substitution, + substitute_expression) from ...syntax.ops import all_variables +from ...syntax.transform import (CNFTransformation, NegatedBuiltinAbsorption, + QuantifierEliminationMode, remove_quantifiers) from ...util import SymbolIndex from . 
import instantiation diff --git a/src/tarski/grounding/naive_grounding.py b/src/tarski/grounding/naive_grounding.py index 388d6fa4..37f4a01b 100644 --- a/src/tarski/grounding/naive_grounding.py +++ b/src/tarski/grounding/naive_grounding.py @@ -4,14 +4,15 @@ import itertools from typing import List -from ..grounding.ops import approximate_symbol_fluency -from ..syntax import Constant, Variable, CompoundTerm, Atom, create_substitution, termlists_are_equal, termlist_hash from ..errors import DuplicateDefinition -from .errors import UnableToGroundError -from .common import StateVariableLite +from ..fstrips.visitors import FluentHeuristic, FluentSymbolCollector +from ..grounding.ops import approximate_symbol_fluency +from ..syntax import (Atom, CompoundTerm, Constant, Variable, + create_substitution, termlist_hash, termlists_are_equal) from ..syntax.transform.substitutions import substitute_expression from ..util import SymbolIndex -from ..fstrips.visitors import FluentSymbolCollector, FluentHeuristic +from .common import StateVariableLite +from .errors import UnableToGroundError class ProblemGrounding: diff --git a/src/tarski/grounding/ops.py b/src/tarski/grounding/ops.py index 300a080f..4b9bf9c6 100644 --- a/src/tarski/grounding/ops.py +++ b/src/tarski/grounding/ops.py @@ -1,5 +1,5 @@ -from ..syntax.util import get_symbols from ..fstrips.ops import collect_affected_symbols +from ..syntax.util import get_symbols def approximate_symbol_fluency(problem, include_builtin=False): diff --git a/src/tarski/io/_fstrips/common.py b/src/tarski/io/_fstrips/common.py index ab28ec0c..5cfec3fd 100644 --- a/src/tarski/io/_fstrips/common.py +++ b/src/tarski/io/_fstrips/common.py @@ -1,9 +1,9 @@ +from ... import theories from ...errors import TarskiError from ...fstrips import FunctionalEffect from ...fstrips.action import AdditiveActionCost, generate_zero_action_cost from ...fstrips.representation import is_typed_problem -from ...syntax import Interval, CompoundTerm, Tautology, BuiltinFunctionSymbol -from ... import theories +from ...syntax import BuiltinFunctionSymbol, CompoundTerm, Interval, Tautology from ...syntax.util import get_symbols from ...theories import Theory diff --git a/src/tarski/io/_fstrips/parser/lexer.py b/src/tarski/io/_fstrips/parser/lexer.py index 6e0def01..65eadf2e 100644 --- a/src/tarski/io/_fstrips/parser/lexer.py +++ b/src/tarski/io/_fstrips/parser/lexer.py @@ -1,8 +1,9 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 -from antlr4 import * +import sys from io import StringIO from typing.io import TextIO -import sys + +from antlr4 import * def serializedATN(): diff --git a/src/tarski/io/_fstrips/parser/listener.py b/src/tarski/io/_fstrips/parser/listener.py index 63dd4e21..9b87c926 100644 --- a/src/tarski/io/_fstrips/parser/listener.py +++ b/src/tarski/io/_fstrips/parser/listener.py @@ -1,5 +1,6 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 from antlr4 import * + if __name__ is not None and "." 
in __name__: from .parser import fstripsParser else: diff --git a/src/tarski/io/_fstrips/parser/parser.py b/src/tarski/io/_fstrips/parser/parser.py index 654357fe..c716269f 100644 --- a/src/tarski/io/_fstrips/parser/parser.py +++ b/src/tarski/io/_fstrips/parser/parser.py @@ -1,8 +1,10 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 -from antlr4 import * +import sys from io import StringIO from typing.io import TextIO -import sys + +from antlr4 import * + def serializedATN(): with StringIO() as buf: diff --git a/src/tarski/io/_fstrips/parser/visitor.py b/src/tarski/io/_fstrips/parser/visitor.py index e61738b6..d6acf522 100644 --- a/src/tarski/io/_fstrips/parser/visitor.py +++ b/src/tarski/io/_fstrips/parser/visitor.py @@ -1,5 +1,6 @@ # Generated from /home/frances/projects/code/tarski/utils/parsers/grammars/fstrips.g4 by ANTLR 4.7.1 from antlr4 import * + if __name__ is not None and "." in __name__: from .parser import fstripsParser else: diff --git a/src/tarski/io/_fstrips/reader.py b/src/tarski/io/_fstrips/reader.py index effae36e..ca2ff482 100644 --- a/src/tarski/io/_fstrips/reader.py +++ b/src/tarski/io/_fstrips/reader.py @@ -5,19 +5,22 @@ import copy import logging -from antlr4 import FileStream, CommonTokenStream, InputStream +from antlr4 import CommonTokenStream, FileStream, InputStream from antlr4.error.ErrorListener import ErrorListener -from .common import parse_number, process_requirements, create_sort, process_cost_effects, LowerCasingStreamWrapper from ...errors import SyntacticError -from ...fstrips import DelEffect, AddEffect, FunctionalEffect, UniversalEffect, OptimizationMetric, OptimizationType -from ...syntax import CompoundFormula, Connective, neg, Tautology, implies, exists, forall, Term, Interval -from ...syntax.builtins import get_predicate_from_symbol, get_function_from_symbol +from ...fstrips import (AddEffect, DelEffect, FunctionalEffect, + OptimizationMetric, OptimizationType, UniversalEffect) +from ...syntax import (CompoundFormula, Connective, Interval, Tautology, Term, + exists, forall, implies, neg) +from ...syntax.builtins import (get_function_from_symbol, + get_predicate_from_symbol) from ...syntax.formulas import VariableBinding - -from .parser.visitor import fstripsVisitor +from .common import (LowerCasingStreamWrapper, create_sort, parse_number, + process_cost_effects, process_requirements) from .parser.lexer import fstripsLexer from .parser.parser import fstripsParser +from .parser.visitor import fstripsVisitor class FStripsParser(fstripsVisitor): diff --git a/src/tarski/io/fstrips.py b/src/tarski/io/fstrips.py index 033d945f..c2c5e482 100644 --- a/src/tarski/io/fstrips.py +++ b/src/tarski/io/fstrips.py @@ -1,24 +1,23 @@ import logging from collections import defaultdict -from typing import Optional, List +from typing import List, Optional +from ..fstrips import (AddEffect, DelEffect, FunctionalEffect, IncreaseEffect, + UniversalEffect, create_fstrips_problem, language) from ..fstrips.action import AdditiveActionCost -from ..theories import load_theory, Theory -from .common import load_tpl from ..model import ExtensionalFunctionDefinition -from ..syntax import Tautology, Contradiction, Atom, CompoundTerm, CompoundFormula, QuantifiedFormula, \ - Term, Variable, Constant, Formula, symref, BuiltinPredicateSymbol -from ..syntax.sorts import parent, Interval, ancestors - -from ._fstrips.common import tarski_to_pddl_type, get_requirements_string, create_number_type, uniformize_costs -from ..fstrips import 
create_fstrips_problem, language, FunctionalEffect, AddEffect, DelEffect, IncreaseEffect,\ - UniversalEffect - -from ._fstrips.reader import FStripsParser - +from ..syntax import (Atom, BuiltinPredicateSymbol, CompoundFormula, + CompoundTerm, Constant, Contradiction, Formula, + QuantifiedFormula, Tautology, Term, Variable, symref) +from ..syntax.sorts import Interval, ancestors, parent +from ..theories import Theory, load_theory +from ._fstrips.common import (create_number_type, get_requirements_string, + tarski_to_pddl_type, uniformize_costs) # Leave the next import so that it can be imported from the outside without warnings of importing a private module # pylint: disable=unused-import from ._fstrips.reader import ParsingError # noqa: F401 +from ._fstrips.reader import FStripsParser +from .common import load_tpl class FstripsReader: diff --git a/src/tarski/io/pddl/instance.py b/src/tarski/io/pddl/instance.py index 4d343fea..69e11275 100644 --- a/src/tarski/io/pddl/instance.py +++ b/src/tarski/io/pddl/instance.py @@ -7,17 +7,15 @@ # PDDL parser # ---------------------------------------------------------------------------------------------------------------------- -from collections import namedtuple, OrderedDict -from typing import Tuple +from collections import OrderedDict, namedtuple from enum import Enum +from typing import Tuple import tarski as tsk from tarski.io.pddl.errors import UnsupportedFeature -from tarski.theories import Theory -from tarski.syntax import Variable, Sort +from tarski.syntax import Sort, Variable, symref from tarski.syntax.sorts import Interval, int_encode_fn -from tarski.syntax import symref - +from tarski.theories import Theory AssignmentEffectData = namedtuple('AssignmentEffectData', ['lhs', 'rhs']) EventData = namedtuple('EventData', ['pre', 'post']) diff --git a/src/tarski/io/pddl/lexer.py b/src/tarski/io/pddl/lexer.py index 24f057b2..35c05a35 100644 --- a/src/tarski/io/pddl/lexer.py +++ b/src/tarski/io/pddl/lexer.py @@ -7,9 +7,10 @@ # PDDL tokenizer # ---------------------------------------------------------------------------------------------------------------------- -import ply.lex as lex # type: ignore import re +import ply.lex as lex # type: ignore + # helper definitions alpha = r'[A-Za-z]' num = r'[0-9]' diff --git a/src/tarski/io/pddl/parser.py b/src/tarski/io/pddl/parser.py index ea933477..675fd9f6 100644 --- a/src/tarski/io/pddl/parser.py +++ b/src/tarski/io/pddl/parser.py @@ -11,11 +11,12 @@ from ply import yacc # type: ignore -from tarski.syntax import CompoundTerm, Term, land, lor, neg, QuantifiedFormula, Quantifier from tarski.io.pddl import Features, supported_features -from tarski.io.pddl.lexer import PDDLlex -from tarski.io.pddl.instance import * from tarski.io.pddl.errors import * +from tarski.io.pddl.instance import * +from tarski.io.pddl.lexer import PDDLlex +from tarski.syntax import (CompoundTerm, QuantifiedFormula, Quantifier, Term, + land, lor, neg) class PDDLparser: diff --git a/src/tarski/io/rddl.py b/src/tarski/io/rddl.py index 4440c92b..2c713c73 100644 --- a/src/tarski/io/rddl.py +++ b/src/tarski/io/rddl.py @@ -4,18 +4,22 @@ from enum import Enum from .. 
import modules -from .common import load_tpl +from ..errors import LanguageError +from ..evaluators.simple import evaluate from ..fol import FirstOrderLanguage -from ..syntax import implies, land, lor, neg, Connective, Quantifier, CompoundTerm, Interval, Atom, IfThenElse, \ - Contradiction, Tautology, CompoundFormula, forall, ite, AggregateCompoundTerm, QuantifiedFormula, Term, Function, \ - Variable, Predicate, Constant, Formula, builtins +from ..model import Model +from ..syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + CompoundTerm, Connective, Constant, Contradiction, + Formula, Function, IfThenElse, Interval, Predicate, + QuantifiedFormula, Quantifier, Tautology, Term, Variable) from ..syntax import arithmetic as tm +from ..syntax import builtins, forall, implies, ite, land, lor, neg +from ..syntax.builtins import BuiltinFunctionSymbol as BFS +from ..syntax.builtins import BuiltinPredicateSymbol as BPS +from ..syntax.builtins import create_atom from ..syntax.temporal import ltl as tt -from ..syntax.builtins import create_atom, BuiltinPredicateSymbol as BPS, BuiltinFunctionSymbol as BFS -from ..model import Model -from ..evaluators.simple import evaluate -from ..errors import LanguageError from ..theories import Theory, language +from .common import load_tpl class TranslationError(Exception): @@ -444,7 +448,8 @@ def get_requirements(self): return ', '.join([str(r) for r in self.task.requirements]) def get_types(self): - from ..syntax.sorts import parent # pylint: disable=import-outside-toplevel # Avoiding circular references + from ..syntax.sorts import \ + parent # pylint: disable=import-outside-toplevel # Avoiding circular references type_decl_list = [] for S in self.task.L.sorts: if S.builtin or S.name == 'object': diff --git a/src/tarski/model.py b/src/tarski/model.py index 9c88c693..4f2f4233 100644 --- a/src/tarski/model.py +++ b/src/tarski/model.py @@ -2,7 +2,7 @@ from typing import Union from . 
import errors as err -from .syntax import Function, Constant, CompoundTerm, symref +from .syntax import CompoundTerm, Constant, Function, symref from .syntax.predicate import Predicate @@ -101,7 +101,8 @@ def set(self, term: CompoundTerm, value: Union[Constant, int, float], *args): def add(self, predicate, *args): """ """ - from .syntax import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .syntax import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references if isinstance(predicate, Atom): args = predicate.subterms predicate = predicate.predicate @@ -141,7 +142,8 @@ def list_all_extensions(self): This list *unwraps* the TermReference's used internally in this class back into plain Tarski terms, so that you can rely on the returned extensions being made up of Constants, Variables, etc., not TermReferences """ - from .syntax.util import get_symbols # pylint: disable=import-outside-toplevel # Avoiding circular references + from .syntax.util import \ + get_symbols # pylint: disable=import-outside-toplevel # Avoiding circular references exts = {k: [unwrap_tuple(tup) for tup in ext] for k, ext in self.predicate_extensions.items()} exts.update((k, [unwrap_tuple(point) + (value, ) for point, value in ext.data.items()]) for k, ext in self.function_extensions.items()) diff --git a/src/tarski/modules.py b/src/tarski/modules.py index ee6e18a9..a4234d18 100644 --- a/src/tarski/modules.py +++ b/src/tarski/modules.py @@ -24,7 +24,8 @@ def _import_numpy(): def _import_pyrddl_parser(): try: - from pyrddl.parser import RDDLParser # pylint: disable=import-outside-toplevel + from pyrddl.parser import \ + RDDLParser # pylint: disable=import-outside-toplevel return RDDLParser except ImportError: raise ImportError('The pyrddl module does not seem available. 
' diff --git a/src/tarski/ndl/temporal.py b/src/tarski/ndl/temporal.py index bb4dcf70..ccc4edba 100644 --- a/src/tarski/ndl/temporal.py +++ b/src/tarski/ndl/temporal.py @@ -8,7 +8,8 @@ Proceedings of the 26th Int'l Joint Conference on Artificial Intelligence (IJCAI) 2017 """ -from ..syntax import Atom, CompoundTerm, CompoundFormula, Constant, symref, Connective, Tautology +from ..syntax import (Atom, CompoundFormula, CompoundTerm, Connective, + Constant, Tautology, symref) class NDLSyntaxError(Exception): diff --git a/src/tarski/rddl/task.py b/src/tarski/rddl/task.py index fcc4375d..b952d037 100644 --- a/src/tarski/rddl/task.py +++ b/src/tarski/rddl/task.py @@ -1,7 +1,7 @@ +from ..evaluators.simple import evaluate from ..fol import FirstOrderLanguage from ..io import rddl from ..model import Model -from ..evaluators.simple import evaluate class Task: diff --git a/src/tarski/reachability/__init__.py b/src/tarski/reachability/__init__.py index 96b5fd27..80a17208 100644 --- a/src/tarski/reachability/__init__.py +++ b/src/tarski/reachability/__init__.py @@ -1,4 +1,4 @@ from .asp import create_reachability_lp -from .clingo_wrapper import run_clingo, parse_model +from .clingo_wrapper import parse_model, run_clingo __all__ = ['create_reachability_lp', 'run_clingo', 'parse_model'] diff --git a/src/tarski/reachability/asp.py b/src/tarski/reachability/asp.py index ecdd379b..9a374b4e 100644 --- a/src/tarski/reachability/asp.py +++ b/src/tarski/reachability/asp.py @@ -3,15 +3,18 @@ """ import itertools +from ..fstrips import (AddEffect, DelEffect, FunctionalEffect, Problem, + SingleEffect) from ..fstrips.action import AdditiveActionCost -from ..syntax.transform import remove_quantifiers, QuantifierEliminationMode +from ..fstrips.representation import (expand_universal_effect, + identify_cost_related_functions) +from ..syntax import (Atom, BuiltinPredicateSymbol, CompoundFormula, + CompoundTerm, Connective, Constant, Formula, + QuantifiedFormula, Quantifier, Tautology, Term, Variable) from ..syntax.builtins import symbol_complements from ..syntax.ops import free_variables -from ..syntax import Formula, Atom, CompoundFormula, Connective, Term, Variable, Constant, Tautology, \ - BuiltinPredicateSymbol, QuantifiedFormula, Quantifier, CompoundTerm -from ..syntax.sorts import parent, Interval -from ..fstrips import Problem, SingleEffect, AddEffect, DelEffect, FunctionalEffect -from ..fstrips.representation import identify_cost_related_functions, expand_universal_effect +from ..syntax.sorts import Interval, parent +from ..syntax.transform import QuantifierEliminationMode, remove_quantifiers GOAL = "goal" diff --git a/src/tarski/reachability/clingo_wrapper.py b/src/tarski/reachability/clingo_wrapper.py index ec99c7bd..43ea7323 100644 --- a/src/tarski/reachability/clingo_wrapper.py +++ b/src/tarski/reachability/clingo_wrapper.py @@ -1,14 +1,15 @@ import logging import os -import sys import shutil +import sys import tempfile -from pathlib import Path from collections import defaultdict +from importlib.util import find_spec +from pathlib import Path -from ..errors import CommandNotFoundError, ExternalCommandError, OutOfMemoryError, OutOfTimeError, ArgumentError +from ..errors import (ArgumentError, CommandNotFoundError, + ExternalCommandError, OutOfMemoryError, OutOfTimeError) from ..utils import command as cmd -from importlib.util import find_spec def get_gringo_command(): diff --git a/src/tarski/sas/__init__.py b/src/tarski/sas/__init__.py index de521b53..7a152e82 100644 --- a/src/tarski/sas/__init__.py +++ 
b/src/tarski/sas/__init__.py @@ -1,6 +1,6 @@ -from .action import Effect, Action +from .action import Action, Effect +from .helper import make_domain from .temporal import TemporalAction from .variable import Variable -from .helper import make_domain __all__ = ['Effect', 'Action', 'TemporalAction', 'Variable', 'make_domain'] diff --git a/src/tarski/sas/action.py b/src/tarski/sas/action.py index ad575888..3bb45be0 100644 --- a/src/tarski/sas/action.py +++ b/src/tarski/sas/action.py @@ -6,6 +6,7 @@ - Representation of action costs is not baked into the class """ from typing import Union + from tarski.syntax import Term, symref diff --git a/src/tarski/sas/helper.py b/src/tarski/sas/helper.py index a9758a10..f541c19a 100644 --- a/src/tarski/sas/helper.py +++ b/src/tarski/sas/helper.py @@ -1,9 +1,10 @@ """ SAS+ Modeling helper """ -from tarski.syntax import Term, symref, land from typing import List +from tarski.syntax import Term, land, symref + def make_domain(dom: List[Term], close: bool = False): """ diff --git a/src/tarski/sas/variable.py b/src/tarski/sas/variable.py index dba897c2..804b4414 100644 --- a/src/tarski/sas/variable.py +++ b/src/tarski/sas/variable.py @@ -1,10 +1,11 @@ """ SAS+ Variables - lean implementation """ +from typing import List, Union + +from tarski.sas.helper import make_domain from tarski.syntax import Term, symref from tarski.syntax.symrefs import TermReference -from tarski.sas.helper import make_domain -from typing import List, Union class InvalidValue(Exception): diff --git a/src/tarski/search/__init__.py b/src/tarski/search/__init__.py index 30f5dc0e..871bc0f3 100644 --- a/src/tarski/search/__init__.py +++ b/src/tarski/search/__init__.py @@ -1,4 +1,4 @@ -from .model import SearchModel, GroundForwardSearchModel from .blind import BreadthFirstSearch +from .model import GroundForwardSearchModel, SearchModel __all__ = ['SearchModel', 'GroundForwardSearchModel', 'BreadthFirstSearch'] diff --git a/src/tarski/search/model.py b/src/tarski/search/model.py index ded1cbf6..38d24848 100644 --- a/src/tarski/search/model.py +++ b/src/tarski/search/model.py @@ -1,5 +1,5 @@ -from .operations import is_applicable, progress from ..evaluators.simple import evaluate +from .operations import is_applicable, progress class SearchModel: diff --git a/src/tarski/search/operations.py b/src/tarski/search/operations.py index 58f5336d..973f263f 100644 --- a/src/tarski/search/operations.py +++ b/src/tarski/search/operations.py @@ -1,7 +1,7 @@ import copy -from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect from ..evaluators.simple import evaluate +from ..fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect from ..fstrips.representation import substitute_expression from ..syntax.transform.substitutions import enumerate_substitutions diff --git a/src/tarski/syntax/__init__.py b/src/tarski/syntax/__init__.py index 20a15356..9bb4cb1a 100644 --- a/src/tarski/syntax/__init__.py +++ b/src/tarski/syntax/__init__.py @@ -1,14 +1,16 @@ +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .formulas import (Atom, CompoundFormula, Connective, Contradiction, + Formula, QuantifiedFormula, Quantifier, Tautology, + VariableBinding, bot, equiv, exists, forall, implies, + is_and, is_neg, is_or, land, lor, neg, top) from .function import Function from .predicate import Predicate -from .sorts import Sort, Interval, inclusion_closure -from .terms import Term, Constant, Variable, CompoundTerm, IfThenElse, ite, AggregateCompoundTerm -from .util import 
termlists_are_equal, termlist_hash -from .formulas import land, lor, neg, implies, forall, exists, equiv, Connective, Atom, Formula, \ - CompoundFormula, QuantifiedFormula, Tautology, Contradiction, top, bot, Quantifier, VariableBinding, \ - is_neg, is_and, is_or -from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .sorts import Interval, Sort, inclusion_closure from .symrefs import symref +from .terms import (AggregateCompoundTerm, CompoundTerm, Constant, IfThenElse, + Term, Variable, ite) from .transform.substitutions import create_substitution, substitute_expression +from .util import termlist_hash, termlists_are_equal __all__ = [ 'AggregateCompoundTerm', diff --git a/src/tarski/syntax/algebra/matrix.py b/src/tarski/syntax/algebra/matrix.py index 6b3c8be6..a13209b7 100644 --- a/src/tarski/syntax/algebra/matrix.py +++ b/src/tarski/syntax/algebra/matrix.py @@ -1,7 +1,7 @@ +from ... import errors as err from ... import modules -from ...syntax import Term, Constant +from ...syntax import Constant, Term from ...syntax.sorts import Sort -from ... import errors as err class Matrix(Term): diff --git a/src/tarski/syntax/arithmetic/__init__.py b/src/tarski/syntax/arithmetic/__init__.py index 7291b2f0..38568274 100644 --- a/src/tarski/syntax/arithmetic/__init__.py +++ b/src/tarski/syntax/arithmetic/__init__.py @@ -1,15 +1,16 @@ # pylint: disable=redefined-builtin -import itertools import copy +import itertools -from ..transform.substitutions import substitute_expression -from ...syntax import Term, AggregateCompoundTerm, CompoundTerm, Constant, Variable, IfThenElse, create_substitution -from ...syntax.algebra import Matrix from ... import errors as err -from ... grounding.naive import instantiation -from ..builtins import BuiltinFunctionSymbol, get_arithmetic_binary_functions from ... import modules +from ...grounding.naive import instantiation +from ...syntax import (AggregateCompoundTerm, CompoundTerm, Constant, + IfThenElse, Term, Variable, create_substitution) +from ...syntax.algebra import Matrix +from ..builtins import BuiltinFunctionSymbol, get_arithmetic_binary_functions +from ..transform.substitutions import substitute_expression def sumterm(*args): diff --git a/src/tarski/syntax/arithmetic/random.py b/src/tarski/syntax/arithmetic/random.py index 81656993..e7fdd794 100644 --- a/src/tarski/syntax/arithmetic/random.py +++ b/src/tarski/syntax/arithmetic/random.py @@ -1,5 +1,5 @@ -from ..builtins import BuiltinFunctionSymbol as bfs from ... import modules +from ..builtins import BuiltinFunctionSymbol as bfs def normal(mu, sigma): diff --git a/src/tarski/syntax/builtins.py b/src/tarski/syntax/builtins.py index b2cd6828..935ecd8d 100644 --- a/src/tarski/syntax/builtins.py +++ b/src/tarski/syntax/builtins.py @@ -58,7 +58,8 @@ def is_builtin_predicate(predicate): def create_atom(lang, symbol: BuiltinPredicateSymbol, lhs, rhs): - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references predicate = lang.get_predicate(symbol) return Atom(predicate, [lhs, rhs]) @@ -67,7 +68,8 @@ def negate_builtin_atom(atom): """ Given an atom based on a built-in predicate, return an equivalent atom with the negation absorbed. If the atom is not based on a built-in predicate, return the atom unchanged. 
""" - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references if isinstance(atom, Atom) and atom.predicate.builtin: pred = atom.predicate return create_atom(pred.language, pred.symbol.complement(), *atom.subterms) diff --git a/src/tarski/syntax/factory.py b/src/tarski/syntax/factory.py index bc10c86b..0c8c71ac 100644 --- a/src/tarski/syntax/factory.py +++ b/src/tarski/syntax/factory.py @@ -1,7 +1,7 @@ from .. import errors as err +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol from .formulas import Atom from .terms import Term -from .builtins import BuiltinPredicateSymbol, BuiltinFunctionSymbol def check_same_language(lhs, rhs): diff --git a/src/tarski/syntax/formulas.py b/src/tarski/syntax/formulas.py index abb59cbb..cfa3994b 100644 --- a/src/tarski/syntax/formulas.py +++ b/src/tarski/syntax/formulas.py @@ -4,9 +4,9 @@ from .. import errors as err from .builtins import BuiltinPredicateSymbol -from .terms import Variable, Term -from .util import termlists_are_equal, termlist_hash from .predicate import Predicate +from .terms import Term, Variable +from .util import termlist_hash, termlists_are_equal class Connective(Enum): diff --git a/src/tarski/syntax/function.py b/src/tarski/syntax/function.py index 627e0df2..f03fcb52 100644 --- a/src/tarski/syntax/function.py +++ b/src/tarski/syntax/function.py @@ -54,5 +54,6 @@ def __str__(self): __repr__ = __str__ def __call__(self, *args): - from .terms import CompoundTerm # pylint: disable=import-outside-toplevel # Avoiding circular references + from .terms import \ + CompoundTerm # pylint: disable=import-outside-toplevel # Avoiding circular references return CompoundTerm(self, args) diff --git a/src/tarski/syntax/ops.py b/src/tarski/syntax/ops.py index f5252907..4611e55e 100644 --- a/src/tarski/syntax/ops.py +++ b/src/tarski/syntax/ops.py @@ -1,12 +1,12 @@ import itertools -from .walker import FOLWalker from .. import modules -from .sorts import children, compute_direct_sort_map, Interval -from .visitors import CollectFreeVariables -from .terms import Term, Constant, Variable, CompoundTerm from .formulas import CompoundFormula, Connective +from .sorts import Interval, children, compute_direct_sort_map from .symrefs import symref +from .terms import CompoundTerm, Constant, Term, Variable +from .visitors import CollectFreeVariables +from .walker import FOLWalker def cast_to_closest_common_numeric_ancestor(lang, lhs, rhs): diff --git a/src/tarski/syntax/predicate.py b/src/tarski/syntax/predicate.py index 72c17a13..4d50fe3c 100644 --- a/src/tarski/syntax/predicate.py +++ b/src/tarski/syntax/predicate.py @@ -51,5 +51,6 @@ def __str__(self): __repr__ = __str__ def __call__(self, *args): - from .formulas import Atom # pylint: disable=import-outside-toplevel # Avoiding circular references + from .formulas import \ + Atom # pylint: disable=import-outside-toplevel # Avoiding circular references return Atom(self, args) diff --git a/src/tarski/syntax/sorts.py b/src/tarski/syntax/sorts.py index 2f08275a..dc6cf370 100644 --- a/src/tarski/syntax/sorts.py +++ b/src/tarski/syntax/sorts.py @@ -52,7 +52,8 @@ def cast(self, x): def to_constant(self, x): """ Cast the given element to a constant of this sort. """ - from . import Constant, Variable # pylint: disable=import-outside-toplevel # Avoiding circular references + from . 
import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Constant, Variable) if isinstance(x, (Constant, Variable)) and x.sort == self: return x if x not in self._domain: @@ -116,7 +117,8 @@ def cast(self, x): def to_constant(self, x): """ Cast the given element to a constant of this sort. """ - from . import Constant, Variable # pylint: disable=import-outside-toplevel # Avoiding circular references + from . import ( # pylint: disable=import-outside-toplevel # Avoiding circular references + Constant, Variable) if isinstance(x, (Constant, Variable)) and x.sort == self: return x return Constant(self.cast(x), self) @@ -157,7 +159,8 @@ def dump(self): def domain(self): if self.builtin or self.upper_bound - self.lower_bound > 9999: # Yes, very hacky raise err.TarskiError(f'Cannot iterate over interval with range [{self.lower_bound}, {self.upper_bound}]') - from . import Constant # pylint: disable=import-outside-toplevel # Avoiding circular references + from . import \ + Constant # pylint: disable=import-outside-toplevel # Avoiding circular references return (Constant(x, self) for x in range(self.lower_bound, self.upper_bound + 1)) diff --git a/src/tarski/syntax/temporal/ltl.py b/src/tarski/syntax/temporal/ltl.py index daa9b5b5..bc7e870e 100644 --- a/src/tarski/syntax/temporal/ltl.py +++ b/src/tarski/syntax/temporal/ltl.py @@ -1,6 +1,7 @@ from enum import Enum + from ... import errors as err -from ..formulas import Formula, Connective, CompoundFormula, lor +from ..formulas import CompoundFormula, Connective, Formula, lor class TemporalConnective(Enum): diff --git a/src/tarski/syntax/terms.py b/src/tarski/syntax/terms.py index 0ce44c5f..85dbb036 100644 --- a/src/tarski/syntax/terms.py +++ b/src/tarski/syntax/terms.py @@ -1,9 +1,9 @@ from typing import Tuple -from .util import termlists_are_equal, termlist_hash -from .sorts import Sort, parent, Interval from .. 
import errors as err -from .builtins import BuiltinPredicateSymbol, BuiltinFunctionSymbol +from .builtins import BuiltinFunctionSymbol, BuiltinPredicateSymbol +from .sorts import Interval, Sort, parent +from .util import termlist_hash, termlists_are_equal class Term: diff --git a/src/tarski/syntax/transform/__init__.py b/src/tarski/syntax/transform/__init__.py index a42f689d..10a97565 100644 --- a/src/tarski/syntax/transform/__init__.py +++ b/src/tarski/syntax/transform/__init__.py @@ -1,8 +1,10 @@ -from .nnf import NNFTransformation, to_negation_normal_form from .cnf import CNFTransformation, to_conjunctive_normal_form -from .prenex import PrenexTransformation, to_prenex_negation_normal_form -from .quantifier_elimination import QuantifierElimination, QuantifierEliminationMode, remove_quantifiers from .neg_builtin import NegatedBuiltinAbsorption +from .nnf import NNFTransformation, to_negation_normal_form +from .prenex import PrenexTransformation, to_prenex_negation_normal_form +from .quantifier_elimination import (QuantifierElimination, + QuantifierEliminationMode, + remove_quantifiers) __all__ = [ 'CNFTransformation', diff --git a/src/tarski/syntax/transform/action_grounding.py b/src/tarski/syntax/transform/action_grounding.py index 09b14900..52156c5b 100644 --- a/src/tarski/syntax/transform/action_grounding.py +++ b/src/tarski/syntax/transform/action_grounding.py @@ -1,6 +1,6 @@ -from ...fstrips.representation import substitute_expression -from ...syntax import symref, Constant, create_substitution, VariableBinding from ...fstrips.action import Action, PlainOperator +from ...fstrips.representation import substitute_expression +from ...syntax import Constant, VariableBinding, create_substitution, symref def ground_schema_into_plain_operator(action: Action, substitution): diff --git a/src/tarski/syntax/transform/cnf.py b/src/tarski/syntax/transform/cnf.py index 487892a3..b204d6a3 100644 --- a/src/tarski/syntax/transform/cnf.py +++ b/src/tarski/syntax/transform/cnf.py @@ -3,7 +3,6 @@ """ from ..formulas import CompoundFormula, Connective, QuantifiedFormula from ..transform import to_negation_normal_form - from .errors import TransformationError diff --git a/src/tarski/syntax/transform/neg_builtin.py b/src/tarski/syntax/transform/neg_builtin.py index c532acf9..a322fffe 100644 --- a/src/tarski/syntax/transform/neg_builtin.py +++ b/src/tarski/syntax/transform/neg_builtin.py @@ -3,8 +3,8 @@ """ import copy -from ..formulas import Connective, Atom, QuantifiedFormula, CompoundFormula from ..builtins import negate_builtin_atom +from ..formulas import Atom, CompoundFormula, Connective, QuantifiedFormula class NegatedBuiltinAbsorption: diff --git a/src/tarski/syntax/transform/nnf.py b/src/tarski/syntax/transform/nnf.py index 945dff29..97efd119 100644 --- a/src/tarski/syntax/transform/nnf.py +++ b/src/tarski/syntax/transform/nnf.py @@ -4,8 +4,9 @@ import copy from ... 
import errors as err -from ..formulas import neg, Formula, QuantifiedFormula, CompoundFormula, Connective, negate_quantifier, Tautology, \ - Contradiction, Atom +from ..formulas import (Atom, CompoundFormula, Connective, Contradiction, + Formula, QuantifiedFormula, Tautology, neg, + negate_quantifier) class NNFTransformation: diff --git a/src/tarski/syntax/transform/prenex.py b/src/tarski/syntax/transform/prenex.py index 807f9736..5781f3f6 100644 --- a/src/tarski/syntax/transform/prenex.py +++ b/src/tarski/syntax/transform/prenex.py @@ -1,12 +1,12 @@ """ Rewrite formulas into prenex negation normal form """ -from .substitutions import substitute_expression +from ..formulas import (CompoundFormula, Connective, QuantifiedFormula, + Quantifier, lor) from ..symrefs import symref -from ..formulas import CompoundFormula, QuantifiedFormula, Connective, Quantifier, lor from ..transform.nnf import NNFTransformation - from .errors import TransformationError +from .substitutions import substitute_expression class PrenexTransformation: diff --git a/src/tarski/syntax/transform/quantifier_elimination.py b/src/tarski/syntax/transform/quantifier_elimination.py index f79ee892..156769c9 100644 --- a/src/tarski/syntax/transform/quantifier_elimination.py +++ b/src/tarski/syntax/transform/quantifier_elimination.py @@ -6,9 +6,10 @@ from enum import Enum from ... import errors as err -from .substitutions import create_substitution, substitute_expression -from ..formulas import land, lor, Quantifier, QuantifiedFormula, Atom, Tautology, Contradiction, CompoundFormula +from ..formulas import (Atom, CompoundFormula, Contradiction, + QuantifiedFormula, Quantifier, Tautology, land, lor) from .errors import TransformationError +from .substitutions import create_substitution, substitute_expression class QuantifierEliminationMode(Enum): @@ -59,7 +60,8 @@ def _recurse(self, phi): def _expand(self, phi: QuantifiedFormula, creator): # Avoiding circular references in the import: - from ...grounding.naive import instantiation # pylint: disable=import-outside-toplevel + from ...grounding.naive import \ + instantiation # pylint: disable=import-outside-toplevel card, syms, substs = instantiation.enumerate_groundings(phi.variables) if card == 0: raise TransformationError("quantifier elimination", phi, "No constants were defined!") diff --git a/src/tarski/syntax/transform/simplifications.py b/src/tarski/syntax/transform/simplifications.py index bd542627..a861df4d 100644 --- a/src/tarski/syntax/transform/simplifications.py +++ b/src/tarski/syntax/transform/simplifications.py @@ -1,6 +1,5 @@ -from ...syntax import Atom, CompoundFormula, Connective, Constant, CompoundTerm +from ...syntax import Atom, CompoundFormula, CompoundTerm, Connective, Constant from ...syntax.builtins import BuiltinPredicateSymbol - from .errors import TransformationError diff --git a/src/tarski/syntax/visitors.py b/src/tarski/syntax/visitors.py index d321a2e6..090f6fb4 100644 --- a/src/tarski/syntax/visitors.py +++ b/src/tarski/syntax/visitors.py @@ -1,4 +1,5 @@ -from tarski.syntax import symref, QuantifiedFormula, CompoundTerm, Variable, CompoundFormula, Atom +from tarski.syntax import (Atom, CompoundFormula, CompoundTerm, + QuantifiedFormula, Variable, symref) from tarski.syntax.formulas import is_eq_atom diff --git a/src/tarski/syntax/walker.py b/src/tarski/syntax/walker.py index d81ced1e..f5023708 100644 --- a/src/tarski/syntax/walker.py +++ b/src/tarski/syntax/walker.py @@ -52,8 +52,10 @@ def run(self, expression, inplace=True): def 
visit_expression(self, node, inplace=True): # pylint: disable=import-outside-toplevel # Avoiding circular references - from .formulas import CompoundFormula, QuantifiedFormula, Atom, Tautology, Contradiction - from .terms import Constant, Variable, CompoundTerm, IfThenElse # pylint: disable=import-outside-toplevel + from .formulas import (Atom, CompoundFormula, Contradiction, + QuantifiedFormula, Tautology) + from .terms import ( # pylint: disable=import-outside-toplevel + CompoundTerm, Constant, IfThenElse, Variable) node = node if inplace else copy.deepcopy(node) if isinstance(node, (Variable, Constant, Contradiction, Tautology)): diff --git a/src/tarski/theories.py b/src/tarski/theories.py index 0c53d95b..faa0ace0 100644 --- a/src/tarski/theories.py +++ b/src/tarski/theories.py @@ -1,14 +1,15 @@ """ Management of the theories (e.g. equality, etc.) associated to the FO languages """ from enum import Enum -from typing import Union, List, Optional +from typing import List, Optional, Union from tarski.errors import DuplicateTheoryDefinition -from .syntax.sorts import attach_arithmetic_sorts, build_the_bools + +from . import errors as err from .fol import FirstOrderLanguage -from .syntax import builtins, Term -from .syntax.factory import create_atom, create_arithmetic_term +from .syntax import Term, builtins +from .syntax.factory import create_arithmetic_term, create_atom from .syntax.ops import cast_to_closest_common_numeric_ancestor -from . import errors as err +from .syntax.sorts import attach_arithmetic_sorts, build_the_bools class Theory(Enum): diff --git a/src/tarski/utils/algorithms.py b/src/tarski/utils/algorithms.py index 7183ac14..c9e0cd05 100644 --- a/src/tarski/utils/algorithms.py +++ b/src/tarski/utils/algorithms.py @@ -1,5 +1,5 @@ import sys -from collections import deque, defaultdict +from collections import defaultdict, deque def transitive_closure(elements): diff --git a/src/tarski/utils/command.py b/src/tarski/utils/command.py index 5b555971..c5e2a58f 100644 --- a/src/tarski/utils/command.py +++ b/src/tarski/utils/command.py @@ -1,7 +1,7 @@ import errno import logging -import subprocess import os +import subprocess def count_file_lines(filename): # Might be a bit faster with a call to "wc -l" diff --git a/tests/analysis/test_csp.py b/tests/analysis/test_csp.py index ccf31833..0e61a219 100644 --- a/tests/analysis/test_csp.py +++ b/tests/analysis/test_csp.py @@ -1,7 +1,8 @@ """ Tests for the CSP analysis module """ -from tarski.analysis.csp import compute_schema_constraint_hypergraph, check_hypergraph_acyclicity +from tarski.analysis.csp import (check_hypergraph_acyclicity, + compute_schema_constraint_hypergraph) from tests.io.common import parse_benchmark_instance diff --git a/tests/benchmarks/test_benchmarks.py b/tests/benchmarks/test_benchmarks.py index cd224234..76c002d5 100644 --- a/tests/benchmarks/test_benchmarks.py +++ b/tests/benchmarks/test_benchmarks.py @@ -1,4 +1,5 @@ -from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem) from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.syntax import is_and diff --git a/tests/common/gridworld.py b/tests/common/gridworld.py index 9d79b205..71bb416d 100644 --- a/tests/common/gridworld.py +++ b/tests/common/gridworld.py @@ -1,6 +1,6 @@ from tarski.fstrips import create_fstrips_problem, language -from tarski.theories 
import Theory from tarski.syntax import Tautology, land +from tarski.theories import Theory def generate_small_gridworld(): diff --git a/tests/common/parcprinter.py b/tests/common/parcprinter.py index 3aa6105c..e5140ff7 100644 --- a/tests/common/parcprinter.py +++ b/tests/common/parcprinter.py @@ -3,10 +3,10 @@ """ import tarski as tsk import tarski.model -from tarski.theories import Theory from tarski import fstrips as fs -from tarski.syntax import top, land from tarski.evaluators.simple import evaluate +from tarski.syntax import land, top +from tarski.theories import Theory def create_small_language(): diff --git a/tests/dl/test_concepts.py b/tests/dl/test_concepts.py index aeda5de9..67293f2f 100644 --- a/tests/dl/test_concepts.py +++ b/tests/dl/test_concepts.py @@ -4,8 +4,10 @@ import pytest import tarski.benchmarks.blocksworld -from tarski.dl import SyntacticFactory, PrimitiveRole, PrimitiveConcept, NominalConcept, StarRole, InverseRole, \ - ArityDLMismatch +from tarski.dl import (ArityDLMismatch, InverseRole, NominalConcept, + PrimitiveConcept, PrimitiveRole, StarRole, + SyntacticFactory) + from ..common import blocksworld diff --git a/tests/fol/test_interpretations.py b/tests/fol/test_interpretations.py index 5ee13cb2..86b282ba 100644 --- a/tests/fol/test_interpretations.py +++ b/tests/fol/test_interpretations.py @@ -1,17 +1,16 @@ +import pytest + import tarski import tarski.benchmarks.blocksworld import tarski.model +from tarski import errors, modules +from tarski.evaluators.simple import evaluate from tarski.fstrips import language from tarski.model import Model -from tarski import errors - -from ..common import numeric -from tarski.evaluators.simple import evaluate from tarski.syntax import Constant, ite, symref from tarski.theories import Theory -from tarski import modules -import pytest +from ..common import numeric try: sp = modules.scipy_special @@ -137,6 +136,7 @@ def test_special_function_abs(): def test_special_function_pow(): import numpy as np + from tarski.syntax.arithmetic import pow lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -148,6 +148,7 @@ def test_special_function_pow(): def test_special_function_sin(): import numpy as np + from tarski.syntax.arithmetic.special import sin lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -159,6 +160,7 @@ def test_special_function_sin(): def test_special_function_sqrt(): import numpy as np + from tarski.syntax.arithmetic import sqrt lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -170,6 +172,7 @@ def test_special_function_sqrt(): def test_special_function_cos(): import numpy as np + from tarski.syntax.arithmetic.special import cos lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -181,6 +184,7 @@ def test_special_function_cos(): def test_special_function_tan(): import numpy as np + from tarski.syntax.arithmetic.special import tan lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -192,6 +196,7 @@ def test_special_function_tan(): def test_special_function_atan(): import numpy as np + from tarski.syntax.arithmetic.special import atan lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -203,6 +208,7 @@ def test_special_function_atan(): def test_special_function_exp(): import numpy as np + from tarski.syntax.arithmetic.special import exp 
lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -214,6 +220,7 @@ def test_special_function_exp(): def test_special_function_log(): import numpy as np + from tarski.syntax.arithmetic.special import log lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -245,6 +252,7 @@ def test_special_function_erfc(): def test_special_function_sgn(): import numpy as np + from tarski.syntax.arithmetic.special import sgn lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) @@ -256,6 +264,7 @@ def test_special_function_sgn(): def test_random_function_normal(): import numpy as np + from tarski.syntax.arithmetic.random import normal np.random.seed(1234) # for repeatability lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL, Theory.RANDOM]) @@ -271,6 +280,7 @@ def test_random_function_normal(): def test_random_function_gamma(): import numpy as np + from tarski.syntax.arithmetic.random import gamma np.random.seed(1234) # for repeatability lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL, Theory.RANDOM]) @@ -286,6 +296,7 @@ def test_random_function_gamma(): def test_arcsin(): import numpy as np + from tarski.syntax.arithmetic.special import asin lang = tarski.fstrips.language(theories=[Theory.ARITHMETIC, Theory.SPECIAL]) model = Model(lang) diff --git a/tests/fol/test_sorts.py b/tests/fol/test_sorts.py index 94d498ad..4fe1fa24 100644 --- a/tests/fol/test_sorts.py +++ b/tests/fol/test_sorts.py @@ -6,7 +6,8 @@ from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.syntax import symref from tarski.syntax.ops import compute_sort_id_assignment -from tarski.syntax.sorts import parent, ancestors, compute_signature_bindings, compute_direct_sort_map +from tarski.syntax.sorts import (ancestors, compute_direct_sort_map, + compute_signature_bindings, parent) from tarski.theories import Theory diff --git a/tests/fol/test_syntactic_analysis.py b/tests/fol/test_syntactic_analysis.py index 419576a2..aaec7bb9 100644 --- a/tests/fol/test_syntactic_analysis.py +++ b/tests/fol/test_syntactic_analysis.py @@ -1,5 +1,7 @@ -from tarski.syntax import neg, land, lor, exists, symref, forall, Variable, Constant, Atom -from tarski.syntax.ops import free_variables, flatten, collect_unique_nodes, all_variables +from tarski.syntax import (Atom, Constant, Variable, exists, forall, land, lor, + neg, symref) +from tarski.syntax.ops import (all_variables, collect_unique_nodes, flatten, + free_variables) from tests.common import tarskiworld from tests.common.blocksworld import generate_bw_loc_and_clear diff --git a/tests/fol/test_syntax.py b/tests/fol/test_syntax.py index b3237362..ff8d2fa2 100755 --- a/tests/fol/test_syntax.py +++ b/tests/fol/test_syntax.py @@ -3,15 +3,17 @@ import pytest -from tarski import theories, Term, Constant -from tarski.benchmarks.blocksworld import generate_strips_bw_language -from tarski.fstrips import fstrips -from tarski.syntax import symref, CompoundFormula, Atom, ite, AggregateCompoundTerm, CompoundTerm, lor, Tautology, \ - Contradiction, land, top, bot -from tarski.theories import Theory +from tarski import Constant, Term from tarski import errors as err from tarski import fstrips as fs +from tarski import theories +from tarski.benchmarks.blocksworld import generate_strips_bw_language +from tarski.fstrips import fstrips +from tarski.syntax import (AggregateCompoundTerm, Atom, CompoundFormula, + 
CompoundTerm, Contradiction, Tautology, bot, ite, + land, lor, symref, top) from tarski.syntax.algebra import Matrix +from tarski.theories import Theory from ..common import numeric diff --git a/tests/fstrips/contingent/localize.py b/tests/fstrips/contingent/localize.py index 71e29fa6..bd80ed8d 100644 --- a/tests/fstrips/contingent/localize.py +++ b/tests/fstrips/contingent/localize.py @@ -2,7 +2,6 @@ from tarski import fstrips as fs from tarski.fstrips import contingent from tarski.syntax import * - from tests.common.grid_navigation import generate_single_agent_language diff --git a/tests/fstrips/contingent/test_sensors.py b/tests/fstrips/contingent/test_sensors.py index 5f213af5..346b1a37 100644 --- a/tests/fstrips/contingent/test_sensors.py +++ b/tests/fstrips/contingent/test_sensors.py @@ -2,7 +2,6 @@ from tarski.fstrips import contingent from tarski.syntax import * - from tests.common import grid_navigation from tests.fstrips.contingent import localize diff --git a/tests/fstrips/hybrid/tasks.py b/tests/fstrips/hybrid/tasks.py index 5222b6af..fd229706 100644 --- a/tests/fstrips/hybrid/tasks.py +++ b/tests/fstrips/hybrid/tasks.py @@ -2,8 +2,8 @@ from tarski.fstrips import hybrid from tarski.syntax import * from tarski.syntax.arithmetic import summation - -from tests.common.numeric import generate_numeric_instance, generate_billiards_instance +from tests.common.numeric import (generate_billiards_instance, + generate_numeric_instance) def create_particles_world(): diff --git a/tests/fstrips/hybrid/test_differential.py b/tests/fstrips/hybrid/test_differential.py index aa86d27e..1c230371 100644 --- a/tests/fstrips/hybrid/test_differential.py +++ b/tests/fstrips/hybrid/test_differential.py @@ -1,6 +1,5 @@ from tarski.fstrips import hybrid from tarski.syntax import * - from tests.common.numeric import generate_numeric_instance diff --git a/tests/fstrips/hybrid/test_reactions.py b/tests/fstrips/hybrid/test_reactions.py index fe846d8f..32a5c8b0 100644 --- a/tests/fstrips/hybrid/test_reactions.py +++ b/tests/fstrips/hybrid/test_reactions.py @@ -1,9 +1,8 @@ +import tests.common.numeric as numeric from tarski import fstrips as fs from tarski.fstrips import hybrid from tarski.syntax import * -import tests.common.numeric as numeric - def test_reaction_creation(): from tarski.syntax.arithmetic import summation diff --git a/tests/fstrips/test_fstrips_operations.py b/tests/fstrips/test_fstrips_operations.py index 4ab428bb..f445bbfb 100644 --- a/tests/fstrips/test_fstrips_operations.py +++ b/tests/fstrips/test_fstrips_operations.py @@ -1,10 +1,10 @@ from tarski.benchmarks.blocksworld import generate_fstrips_bw_language -from tarski.fstrips import create_fstrips_problem, AddEffect +from tarski.fstrips import AddEffect, create_fstrips_problem from tarski.fstrips.ops import collect_all_symbols from tarski.grounding.ops import approximate_symbol_fluency from tarski.syntax import top -from ..common import parcprinter, gripper +from ..common import gripper, parcprinter def test_symbol_classification_in_parcprinter(): diff --git a/tests/fstrips/test_problem_grounding.py b/tests/fstrips/test_problem_grounding.py index 667f6ea8..ac5a9c76 100644 --- a/tests/fstrips/test_problem_grounding.py +++ b/tests/fstrips/test_problem_grounding.py @@ -1,15 +1,16 @@ from collections import OrderedDict -from tarski.fstrips import DelEffect, UniversalEffect, AddEffect +from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem +from tarski.fstrips import AddEffect, DelEffect, UniversalEffect from 
tarski.fstrips.action import PlainOperator from tarski.fstrips.representation import is_ground from tarski.grounding import ProblemGrounding -from tarski.grounding.lp_grounding import ground_problem_schemas_into_plain_operators +from tarski.grounding.lp_grounding import \ + ground_problem_schemas_into_plain_operators from tarski.syntax import symref -from tarski.syntax.transform.action_grounding import ground_schema_into_plain_operator, \ - ground_schema_into_plain_operator_from_grounding -from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem - +from tarski.syntax.transform.action_grounding import ( + ground_schema_into_plain_operator, + ground_schema_into_plain_operator_from_grounding) from tests.common import blocksworld diff --git a/tests/fstrips/test_representation.py b/tests/fstrips/test_representation.py index 19efa55d..90eabec3 100644 --- a/tests/fstrips/test_representation.py +++ b/tests/fstrips/test_representation.py @@ -1,15 +1,22 @@ import tarski.benchmarks.blocksworld +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_fstrips_bw_language, + generate_strips_blocksworld_problem) from tarski.benchmarks.counters import generate_fstrips_counters_problem -from tarski.fstrips.representation import collect_effect_free_parameters, project_away_effect_free_variables, \ - collect_effect_free_variables, project_away_effect_free_variables_from_problem, is_typed_problem, \ - identify_cost_related_functions, compute_delete_free_relaxation, is_delete_free, is_strips_problem, \ - is_conjunction_of_positive_atoms, is_strips_effect_set, compile_away_formula_negated_literals, \ - compile_action_negated_preconditions_away, compile_negated_preconditions_away, compute_complementary_atoms -from tarski.syntax import exists, land, neg, symref, substitute_expression, forall -from tarski.fstrips import representation as rep, AddEffect, DelEffect +from tarski.fstrips import AddEffect, DelEffect +from tarski.fstrips import representation as rep +from tarski.fstrips.representation import ( + collect_effect_free_parameters, collect_effect_free_variables, + compile_action_negated_preconditions_away, + compile_away_formula_negated_literals, compile_negated_preconditions_away, + compute_complementary_atoms, compute_delete_free_relaxation, + identify_cost_related_functions, is_conjunction_of_positive_atoms, + is_delete_free, is_strips_effect_set, is_strips_problem, is_typed_problem, + project_away_effect_free_variables, + project_away_effect_free_variables_from_problem) +from tarski.syntax import (exists, forall, land, neg, substitute_expression, + symref) from tarski.syntax.ops import flatten -from tarski.benchmarks.blocksworld import generate_fstrips_bw_language, generate_fstrips_blocksworld_problem, \ - generate_strips_blocksworld_problem from tests.io.common import parse_benchmark_instance diff --git a/tests/fstrips/test_simplify.py b/tests/fstrips/test_simplify.py index bd5e63f4..e6a65a31 100644 --- a/tests/fstrips/test_simplify.py +++ b/tests/fstrips/test_simplify.py @@ -2,8 +2,9 @@ from tarski.benchmarks.counters import generate_fstrips_counters_problem from tarski.fstrips import UniversalEffect from tarski.fstrips.manipulation import Simplify -from tarski.fstrips.manipulation.simplify import simplify_existential_quantification -from tarski.syntax import symref, land, lor, neg, bot, top, forall, exists +from tarski.fstrips.manipulation.simplify import \ + simplify_existential_quantification +from tarski.syntax import bot, exists, forall, 
land, lor, neg, symref, top def test_simplifier(): diff --git a/tests/fstrips/test_symbol_classification.py b/tests/fstrips/test_symbol_classification.py index df618105..6f55370b 100644 --- a/tests/fstrips/test_symbol_classification.py +++ b/tests/fstrips/test_symbol_classification.py @@ -1,9 +1,10 @@ import pytest + from tarski.grounding.ops import approximate_symbol_fluency from tarski.syntax.util import get_symbols -from ..io.common import reader, collect_strips_benchmarks, collect_fstrips_benchmarks - +from ..io.common import (collect_fstrips_benchmarks, collect_strips_benchmarks, + reader) SAMPLE_STRIPS_INSTANCES = [ "settlers-sat18-adl:p01.pddl", diff --git a/tests/fstrips/test_walker.py b/tests/fstrips/test_walker.py index 9fdc95da..ba10abf4 100644 --- a/tests/fstrips/test_walker.py +++ b/tests/fstrips/test_walker.py @@ -1,6 +1,5 @@ from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem - # def test_fstrips_problem_walker(): # problem = generate_fstrips_blocksworld_problem( # nblocks=2, diff --git a/tests/grounding/test_lp_grounding.py b/tests/grounding/test_lp_grounding.py index f5849e25..7d6f3d0f 100644 --- a/tests/grounding/test_lp_grounding.py +++ b/tests/grounding/test_lp_grounding.py @@ -1,4 +1,5 @@ import shutil + import pytest from tarski.grounding import LPGroundingStrategy, NaiveGroundingStrategy @@ -7,10 +8,11 @@ from tarski.reachability import create_reachability_lp from tarski.syntax import neg from tests.common.benchmarks import get_lenient_benchmarks - from tests.common.gripper import create_sample_problem from tests.common.simple import create_simple_problem -from ..io.common import reader, collect_strips_benchmarks, parse_benchmark_instance + +from ..io.common import (collect_strips_benchmarks, parse_benchmark_instance, + reader) if shutil.which("gringo") is None: pytest.skip('Install the Clingo ASP solver and put the "gringo" binary on your PATH in order to test ASP-based ' diff --git a/tests/grounding/test_naive_grounding.py b/tests/grounding/test_naive_grounding.py index 7e1fecef..531dead5 100644 --- a/tests/grounding/test_naive_grounding.py +++ b/tests/grounding/test_naive_grounding.py @@ -1,17 +1,21 @@ -from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem -from tarski.grounding import ProblemGrounding, NaiveGroundingStrategy, create_all_possible_state_variables +from tarski.benchmarks.blocksworld import ( + generate_fstrips_blocksworld_problem, generate_strips_blocksworld_problem) +from tarski.grounding import (NaiveGroundingStrategy, ProblemGrounding, + create_all_possible_state_variables) from tarski.grounding.naive import instantiation -from tarski.util import SymbolIndex -from tarski.syntax import create_substitution -from tarski.grounding.naive.sensors import SensorGrounder from tarski.grounding.naive.constraints import ConstraintGrounder -from tarski.grounding.naive.diff_constraints import DifferentialConstraintGrounder +from tarski.grounding.naive.diff_constraints import \ + DifferentialConstraintGrounder from tarski.grounding.naive.reactions import ReactionGrounder +from tarski.grounding.naive.sensors import SensorGrounder +from tarski.syntax import create_substitution +from tarski.util import SymbolIndex +from tests.common import parcprinter +from tests.common.blocksworld import create_4blocks_task from ..fstrips.contingent import localize -from ..fstrips.hybrid.tasks import create_particles_world, create_billiards_world -from tests.common.blocksworld import 
create_4blocks_task -from tests.common import parcprinter +from ..fstrips.hybrid.tasks import (create_billiards_world, + create_particles_world) def create_small_bw_with_index(): diff --git a/tests/io/test_builtin_domains_parsing.py b/tests/io/test_builtin_domains_parsing.py index ecc73d71..69b677eb 100644 --- a/tests/io/test_builtin_domains_parsing.py +++ b/tests/io/test_builtin_domains_parsing.py @@ -2,7 +2,6 @@ from .common import reader - _this_dir = os.path.dirname(os.path.realpath(__file__)) _data_dir = os.path.join(_this_dir, "..", "data", "pddl") diff --git a/tests/io/test_fstrips_full_domains_parsing.py b/tests/io/test_fstrips_full_domains_parsing.py index 784601fc..b9317344 100644 --- a/tests/io/test_fstrips_full_domains_parsing.py +++ b/tests/io/test_fstrips_full_domains_parsing.py @@ -1,7 +1,11 @@ -from tarski.fstrips.representation import is_unit_cost_problem, is_unit_cost_action, is_zero_cost_action, \ - is_constant_cost_action +from tarski.fstrips.representation import (is_constant_cost_action, + is_unit_cost_action, + is_unit_cost_problem, + is_zero_cost_action) from tests.common.benchmarks import get_lenient_benchmarks -from .common import reader, collect_strips_benchmarks, collect_fstrips_benchmarks, parse_benchmark_instance + +from .common import (collect_fstrips_benchmarks, collect_strips_benchmarks, + parse_benchmark_instance, reader) # Let's make sure we can correctly parse all benchmarks from the IPC competitions in 2008, 2011, 2014, 2018. # We have chosen optimal track benchmarks, which one would expect to be the smallest between optimal / satisficing diff --git a/tests/io/test_fstrips_parsing.py b/tests/io/test_fstrips_parsing.py index 246238e2..dace74f1 100644 --- a/tests/io/test_fstrips_parsing.py +++ b/tests/io/test_fstrips_parsing.py @@ -1,14 +1,14 @@ import pytest -from tarski.errors import UndefinedSort, UndefinedPredicate + +from tarski.errors import UndefinedPredicate, UndefinedSort from tarski.fstrips import AddEffect, FunctionalEffect from tarski.fstrips.errors import InvalidEffectError -from tarski.io.fstrips import ParsingError, FstripsReader +from tarski.io.fstrips import FstripsReader, ParsingError from tarski.syntax import Atom, CompoundFormula, Tautology from tarski.syntax.util import get_symbols from tarski.theories import Theory - from tests.common.spider import generate_spider_language -from tests.io.common import reader, parse_benchmark_instance +from tests.io.common import parse_benchmark_instance, reader def get_rule(name): diff --git a/tests/io/test_fstrips_writer.py b/tests/io/test_fstrips_writer.py index b7c73cb9..e9c73809 100644 --- a/tests/io/test_fstrips_writer.py +++ b/tests/io/test_fstrips_writer.py @@ -1,18 +1,21 @@ import tempfile -from typing import Optional, List +from typing import List, Optional import tarski.fstrips as fs from tarski.benchmarks.blocksworld import generate_fstrips_blocksworld_problem -from tarski.benchmarks.counters import get_counters_elements, generate_fstrips_counters_problem -from tarski.fstrips import AddEffect, DelEffect, FunctionalEffect, UniversalEffect +from tarski.benchmarks.counters import (generate_fstrips_counters_problem, + get_counters_elements) +from tarski.fstrips import (AddEffect, DelEffect, FunctionalEffect, + UniversalEffect) from tarski.io import FstripsWriter from tarski.io._fstrips.common import get_requirements_string -from tarski.io.fstrips import print_effects, print_effect, print_objects, print_metric, print_formula, print_term -from tarski.syntax import forall, exists, Constant +from 
tarski.io.fstrips import (print_effect, print_effects, print_formula, + print_metric, print_objects, print_term) +from tarski.syntax import Constant, exists, forall from tarski.theories import Theory - from tests.common import parcprinter from tests.io.common import reader + from ..common.gridworld import generate_small_gridworld diff --git a/tests/io/test_pddl_parsing.py b/tests/io/test_pddl_parsing.py index 8fd17ed6..2acdaa28 100644 --- a/tests/io/test_pddl_parsing.py +++ b/tests/io/test_pddl_parsing.py @@ -4,8 +4,9 @@ import tempfile import pytest -from tarski.io.pddl.lexer import PDDLlex + from tarski.io.pddl import Features +from tarski.io.pddl.lexer import PDDLlex from tarski.io.pddl.parser import PDDLparser, UnsupportedFeature from tarski.syntax.visitors import CollectEqualityAtoms diff --git a/tests/io/test_rddl_parsing.py b/tests/io/test_rddl_parsing.py index 5fe183c1..a0aeb771 100644 --- a/tests/io/test_rddl_parsing.py +++ b/tests/io/test_rddl_parsing.py @@ -1,5 +1,5 @@ -from tarski.syntax import * from tarski.io import rddl +from tarski.syntax import * def test_language_init_mars_rovers(): diff --git a/tests/io/test_rddl_writer.py b/tests/io/test_rddl_writer.py index 68f3cd20..17748db9 100644 --- a/tests/io/test_rddl_writer.py +++ b/tests/io/test_rddl_writer.py @@ -2,13 +2,13 @@ import tempfile import tarski -from tarski.theories import Theory -from tarski.syntax import * from tarski.io import rddl +from tarski.rddl import Task +from tarski.syntax import * from tarski.syntax.arithmetic import * -from tarski.syntax.arithmetic.special import * from tarski.syntax.arithmetic.random import * -from tarski.rddl import Task +from tarski.syntax.arithmetic.special import * +from tarski.theories import Theory def test_simple_rddl_model(): diff --git a/tests/ndl/test_temporal.py b/tests/ndl/test_temporal.py index a5dc57c7..31dae4a5 100644 --- a/tests/ndl/test_temporal.py +++ b/tests/ndl/test_temporal.py @@ -1,12 +1,12 @@ import pytest import tarski as tsk -from tarski.model import Model from tarski.evaluators.simple import evaluate -from tarski.syntax import forall, equiv, neg, land, exists -from tarski.theories import Theory +from tarski.model import Model from tarski.ndl import temporal -from tarski.ndl.temporal import TimedEffect, SetLiteralEffect +from tarski.ndl.temporal import SetLiteralEffect, TimedEffect +from tarski.syntax import equiv, exists, forall, land, neg +from tarski.theories import Theory def test_resource_lock_creation(): diff --git a/tests/reachability/test_reachability_lp.py b/tests/reachability/test_reachability_lp.py index eb0dcde8..72ce363b 100644 --- a/tests/reachability/test_reachability_lp.py +++ b/tests/reachability/test_reachability_lp.py @@ -1,6 +1,8 @@ -from tarski.reachability.asp import create_reachability_lp, LogicProgram, ReachabilityLPCompiler, LPAtom -from tarski.syntax import exists from tarski import fstrips as fs +from tarski.reachability.asp import (LogicProgram, LPAtom, + ReachabilityLPCompiler, + create_reachability_lp) +from tarski.syntax import exists from tests.io.common import parse_benchmark_instance from ..common.gripper import create_sample_problem diff --git a/tests/sas/test_action.py b/tests/sas/test_action.py index 47003746..df8922a2 100644 --- a/tests/sas/test_action.py +++ b/tests/sas/test_action.py @@ -1,9 +1,11 @@ +from itertools import combinations, permutations + import pytest + import tarski as tsk -from tarski.theories import Theory +from tarski.sas import Action, Effect, TemporalAction, Variable from tarski.syntax import land, 
symref -from tarski.sas import Effect, Action, Variable, TemporalAction -from itertools import combinations, permutations +from tarski.theories import Theory @pytest.mark.sas diff --git a/tests/search/test_search_models.py b/tests/search/test_search_models.py index a055fb1f..25eddf77 100644 --- a/tests/search/test_search_models.py +++ b/tests/search/test_search_models.py @@ -2,10 +2,12 @@ Tests for the Search module """ from tarski.benchmarks.blocksworld import generate_strips_blocksworld_problem -from tarski.grounding.lp_grounding import ground_problem_schemas_into_plain_operators -from tarski.search import GroundForwardSearchModel, BreadthFirstSearch +from tarski.grounding.lp_grounding import \ + ground_problem_schemas_into_plain_operators +from tarski.search import BreadthFirstSearch, GroundForwardSearchModel from tarski.search.model import progress -from tarski.syntax.transform.action_grounding import ground_schema_into_plain_operator_from_grounding +from tarski.syntax.transform.action_grounding import \ + ground_schema_into_plain_operator_from_grounding from tarski.utils import parse_model from tests.io.common import parse_benchmark_instance diff --git a/tests/transforms/test_syntax_transformations.py b/tests/transforms/test_syntax_transformations.py index a4363753..981cb124 100644 --- a/tests/transforms/test_syntax_transformations.py +++ b/tests/transforms/test_syntax_transformations.py @@ -3,15 +3,16 @@ import tarski.benchmarks.blocksworld from tarski.fstrips.representation import is_quantifier_free from tarski.syntax import * -from tests.common import tarskiworld - -from tarski.syntax.transform.nnf import NNFTransformation +from tarski.syntax.transform import (CNFTransformation, + NegatedBuiltinAbsorption, + QuantifierElimination, + QuantifierEliminationMode, + remove_quantifiers) from tarski.syntax.transform.cnf import to_conjunctive_normal_form_clauses -from tarski.syntax.transform.prenex import to_prenex_negation_normal_form -from tarski.syntax.transform import CNFTransformation, QuantifierElimination, remove_quantifiers, \ - QuantifierEliminationMode -from tarski.syntax.transform import NegatedBuiltinAbsorption from tarski.syntax.transform.errors import TransformationError +from tarski.syntax.transform.nnf import NNFTransformation +from tarski.syntax.transform.prenex import to_prenex_negation_normal_form +from tests.common import tarskiworld def test_nnf_conjunction(): From d43472f2a09a35f5b7dd08b2b908b41478612003 Mon Sep 17 00:00:00 2001 From: Guillem Frances Date: Fri, 11 Mar 2022 12:51:38 +0100 Subject: [PATCH 14/14] Fix imports --- src/tarski/fstrips/walker.py | 8 ++++---- src/tarski/io/sas/fd.py | 3 +-- src/tarski/sas/__init__.py | 1 - src/tarski/sas/util.py | 9 +++++---- src/tarski/syntax/transform/cnf.py | 2 +- tests/io/test_fd_sas_writer.py | 5 +++-- tests/sas/test_action.py | 14 ++++++++------ 7 files changed, 22 insertions(+), 20 deletions(-) diff --git a/src/tarski/fstrips/walker.py b/src/tarski/fstrips/walker.py index 075690ee..a40f9640 100644 --- a/src/tarski/fstrips/walker.py +++ b/src/tarski/fstrips/walker.py @@ -63,8 +63,8 @@ def run(self, expression, inplace=True): # Avoiding circular references: from ..syntax import ( # pylint: disable=import-outside-toplevel # Avoiding circular references Formula, Term) - from . import (Action, # pylint: disable=import-outside-toplevel - BaseEffect, Problem) + from . import Action # pylint: disable=import-outside-toplevel + from . 
import BaseEffect, Problem # Simply dispatch according to type expression = expression if inplace else copy.deepcopy(expression) @@ -98,8 +98,8 @@ def visit_action(self, node, inplace=False): return node def visit_effect(self, effect, inplace=True): - from . import (AddEffect, # pylint: disable=import-outside-toplevel - DelEffect, FunctionalEffect, UniversalEffect) + from . import AddEffect # pylint: disable=import-outside-toplevel + from . import DelEffect, FunctionalEffect, UniversalEffect effect = effect if inplace else copy.deepcopy(effect) if isinstance(effect, (AddEffect, DelEffect)): diff --git a/src/tarski/io/sas/fd.py b/src/tarski/io/sas/fd.py index c5f3fc55..a32ed527 100644 --- a/src/tarski/io/sas/fd.py +++ b/src/tarski/io/sas/fd.py @@ -7,9 +7,8 @@ # SAS instance writer for Fast Downward pre-processor # ---------------------------------------------------------------------------------------------------------------------- -from tarski.syntax import symref, CompoundTerm from tarski.io.sas.templates import * - +from tarski.syntax import CompoundTerm, symref FAST_DOWNWARD_SAS_VERSION = 4 diff --git a/src/tarski/sas/__init__.py b/src/tarski/sas/__init__.py index 1a404840..f383facb 100644 --- a/src/tarski/sas/__init__.py +++ b/src/tarski/sas/__init__.py @@ -9,7 +9,6 @@ from collections import namedtuple - Schema = namedtuple('Schema', ['name', 'variables', 'constraints', 'transitions']) Action = namedtuple('Action', ['name', 'arguments', 'transitions']) diff --git a/src/tarski/sas/util.py b/src/tarski/sas/util.py index 7936c526..30be08f6 100644 --- a/src/tarski/sas/util.py +++ b/src/tarski/sas/util.py @@ -7,14 +7,15 @@ # Utility method to process SAS # ---------------------------------------------------------------------------------------------------------------------- from itertools import product + import tarski.model from tarski.evaluators.simple import evaluate -from tarski.syntax import symref -from tarski.theories import Theory -from tarski.syntax.transform.substitutions import substitute_expression, create_substitution - from tarski.sas import Action from tarski.sas.temporal import TemporalAction +from tarski.syntax import symref +from tarski.syntax.transform.substitutions import (create_substitution, + substitute_expression) +from tarski.theories import Theory def check_constraints(C, s, subst): diff --git a/src/tarski/syntax/transform/cnf.py b/src/tarski/syntax/transform/cnf.py index b204d6a3..3d6e1ab4 100644 --- a/src/tarski/syntax/transform/cnf.py +++ b/src/tarski/syntax/transform/cnf.py @@ -1,8 +1,8 @@ """ CNF Transformation """ +from .nnf import to_negation_normal_form from ..formulas import CompoundFormula, Connective, QuantifiedFormula -from ..transform import to_negation_normal_form from .errors import TransformationError diff --git a/tests/io/test_fd_sas_writer.py b/tests/io/test_fd_sas_writer.py index c2894839..8bea67de 100644 --- a/tests/io/test_fd_sas_writer.py +++ b/tests/io/test_fd_sas_writer.py @@ -1,11 +1,12 @@ import pytest import tarski as tsk -from tarski.theories import Theory -from tarski.syntax import land, symref from tarski.io.sas.fd import Writer +from tarski.syntax import land, symref +from tarski.theories import Theory from tarski.util import SymbolIndex + @pytest.mark.sas def test_gripper_instance(): """ diff --git a/tests/sas/test_action.py b/tests/sas/test_action.py index c7bb7a27..6aea2b32 100644 --- a/tests/sas/test_action.py +++ b/tests/sas/test_action.py @@ -1,13 +1,15 @@ +from itertools import combinations, permutations + import pytest + import 
tarski as tsk -from tarski.theories import Theory -from tarski.syntax import land, symref -from tarski.sas import Schema, Action -from tarski.sas.temporal import TemporalAction -from tarski.sas.util import ground_temporal_action -from itertools import combinations, permutations import tarski.model from tarski.evaluators.simple import evaluate +from tarski.sas import Action, Schema +from tarski.sas.temporal import TemporalAction +from tarski.sas.util import ground_temporal_action +from tarski.syntax import land, symref +from tarski.theories import Theory @pytest.mark.sas