From b9715ceccf330c453de73e024ff035b00ef0b7e6 Mon Sep 17 00:00:00 2001 From: nate nowack Date: Fri, 9 Feb 2024 18:09:07 -0500 Subject: [PATCH] adopt ruff formatter (#11959) --- .pre-commit-config.yaml | 5 +- .ruff.toml | 8 +- .../_internal/concurrency/cancellation.py | 4 +- .../_internal/concurrency/event_loop.py | 4 +- .../pydantic/annotations/pendulum.py | 8 +- src/prefect/_internal/pydantic/v2_schema.py | 4 +- src/prefect/_vendor/fastapi/applications.py | 2 +- .../_vendor/fastapi/dependencies/utils.py | 2 +- src/prefect/_vendor/fastapi/encoders.py | 2 +- src/prefect/_vendor/fastapi/routing.py | 12 +- src/prefect/_vendor/fastapi/utils.py | 6 +- .../_vendor/starlette/authentication.py | 2 +- src/prefect/_vendor/starlette/routing.py | 6 +- src/prefect/blocks/core.py | 6 +- src/prefect/blocks/notifications.py | 16 +-- src/prefect/cli/_prompts.py | 12 +- src/prefect/cli/dev.py | 3 +- src/prefect/cli/flow_run.py | 2 +- src/prefect/cli/profile.py | 2 +- src/prefect/context.py | 6 +- src/prefect/engine.py | 3 +- src/prefect/filesystems.py | 18 +-- src/prefect/futures.py | 2 +- src/prefect/infrastructure/container.py | 6 +- src/prefect/infrastructure/kubernetes.py | 10 +- src/prefect/infrastructure/process.py | 6 +- src/prefect/input/run_input.py | 2 +- src/prefect/logging/formatters.py | 2 +- src/prefect/runner/server.py | 6 +- src/prefect/server/api/dependencies.py | 2 +- src/prefect/server/api/deployments.py | 24 ++-- src/prefect/server/database/orm_models.py | 3 +- .../server/database/query_components.py | 7 +- src/prefect/server/models/agents.py | 3 +- src/prefect/server/models/block_schemas.py | 10 +- src/prefect/server/models/flow_runs.py | 3 +- src/prefect/server/models/flows.py | 3 +- src/prefect/server/models/task_runs.py | 3 +- .../server/orchestration/dependencies.py | 24 ++-- src/prefect/server/utilities/database.py | 6 +- src/prefect/settings.py | 4 +- src/prefect/software/pip.py | 2 +- .../standard_test_suites/task_runners.py | 6 +- src/prefect/utilities/asyncutils.py | 2 +- src/prefect/utilities/collections.py | 2 +- .../concurrency/test_cancellation.py | 10 +- tests/agent/test_agent_run_cancellation.py | 3 +- tests/agent/test_agent_run_submission.py | 3 +- tests/cli/test_deploy.py | 3 +- tests/cli/test_work_pool.py | 6 +- tests/client/test_base_client.py | 7 +- tests/client/test_prefect_client.py | 24 ++-- tests/deployment/test_steps.py | 3 +- tests/infrastructure/provisioners/test_ecs.py | 5 +- tests/infrastructure/test_docker_container.py | 3 +- tests/runner/test_storage.py | 3 +- tests/runner/test_webserver.py | 3 +- tests/server/api/test_deployments.py | 3 +- tests/server/api/test_workers.py | 6 +- tests/server/models/test_variables.py | 12 +- tests/server/orchestration/test_rules.py | 10 +- tests/test_context.py | 6 +- tests/test_deployments.py | 17 +-- tests/test_engine.py | 21 ++-- tests/test_flows.py | 3 +- tests/test_plugins.py | 18 +-- tests/utilities/test_filesystem.py | 2 +- tests/workers/test_base_worker.py | 105 +++++++++--------- versioneer.py | 4 +- 69 files changed, 272 insertions(+), 279 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a71c2eb9087a..5dee5a0a405b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,13 @@ repos: - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.262" + rev: "v0.2.1" hooks: + - id: ruff-format - id: ruff language_version: python3 args: [--fix, --exit-non-zero-on-fix, --show-fixes] - repo: https://github.com/codespell-project/codespell - rev: v2.2.5 + 
rev: v2.2.6 hooks: - id: codespell exclude: package-lock.json|_vendor/.* diff --git a/.ruff.toml b/.ruff.toml index f26004b8d2e9..92dc7ca191d8 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -1,12 +1,12 @@ src = ["src"] # Use Ruff for sorting imports -extend-select = ["I"] +lint.extend-select = ["I"] # Do not enforce line length; black does this for code and we do not care about comments / docs -ignore = ["E501"] +lint.ignore = ["E501"] -[per-file-ignores] +[lint.per-file-ignores] # Do not enforce usage and import order rules in init files "__init__.py" = ["E402", "F401", "I"] @@ -22,5 +22,5 @@ ignore = ["E501"] # Do not enforce line length limits in migrations "src/prefect/server/database/migrations/**/*" = ["E501"] -[isort] +[lint.isort] known-third-party = ["prefect._vendor"] diff --git a/src/prefect/_internal/concurrency/cancellation.py b/src/prefect/_internal/concurrency/cancellation.py index 013db9432b44..8dcb4e6519de 100644 --- a/src/prefect/_internal/concurrency/cancellation.py +++ b/src/prefect/_internal/concurrency/cancellation.py @@ -230,7 +230,9 @@ def __repr__(self) -> str: else ( "cancelled" if self._cancelled - else "running" if self._started else "pending" + else "running" + if self._started + else "pending" ) ).upper() timeout = f", timeout={self._timeout:.2f}" if self._timeout else "" diff --git a/src/prefect/_internal/concurrency/event_loop.py b/src/prefect/_internal/concurrency/event_loop.py index 3f4f187231ac..024fe877173a 100644 --- a/src/prefect/_internal/concurrency/event_loop.py +++ b/src/prefect/_internal/concurrency/event_loop.py @@ -29,7 +29,7 @@ def call_in_loop( __loop: asyncio.AbstractEventLoop, __fn: Callable[P, T], *args: P.args, - **kwargs: P.kwargs + **kwargs: P.kwargs, ) -> T: """ Run a synchronous call in event loop's thread from another thread. @@ -49,7 +49,7 @@ def call_soon_in_loop( __loop: asyncio.AbstractEventLoop, __fn: Callable[P, T], *args: P.args, - **kwargs: P.kwargs + **kwargs: P.kwargs, ) -> concurrent.futures.Future: """ Run a synchronous call in an event loop's thread from another thread. diff --git a/src/prefect/_internal/pydantic/annotations/pendulum.py b/src/prefect/_internal/pydantic/annotations/pendulum.py index cc1a80742160..0af0a3149b39 100644 --- a/src/prefect/_internal/pydantic/annotations/pendulum.py +++ b/src/prefect/_internal/pydantic/annotations/pendulum.py @@ -13,9 +13,9 @@ class _PendulumDateTimeAnnotation: - _pendulum_type: t.Type[t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time]] = ( - pendulum.DateTime - ) + _pendulum_type: t.Type[ + t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time] + ] = pendulum.DateTime _pendulum_types_to_schemas = { pendulum.DateTime: core_schema.datetime_schema(), @@ -35,7 +35,7 @@ def validate_from_str( return pendulum.parse(value) def to_str( - value: t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time] + value: t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time], ) -> str: return value.isoformat() diff --git a/src/prefect/_internal/pydantic/v2_schema.py b/src/prefect/_internal/pydantic/v2_schema.py index d315b8197d98..6d76d13fc05c 100644 --- a/src/prefect/_internal/pydantic/v2_schema.py +++ b/src/prefect/_internal/pydantic/v2_schema.py @@ -59,7 +59,7 @@ def process_v2_params( *, position: int, docstrings: t.Dict[str, str], - aliases: t.Dict + aliases: t.Dict, ) -> t.Tuple[str, t.Any, "pydantic.Field"]: """ Generate a sanitized name, type, and pydantic.Field for a given parameter. 
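
Two mechanical changes recur from here on. The .ruff.toml hunk above moves every top-level lint setting under the `lint.` namespace, which ruff deprecates at the top level as of the 0.2 series; and the event_loop.py hunks above (plus the create_v2_schema hunk just below) show the formatter's first recurring signature rule: a multi-line signature ending in `*args`/`**kwargs` gains a trailing comma. A minimal runnable sketch of that rule, with invented names — nothing here is from the patch:

from typing import Any, Callable, TypeVar

T = TypeVar("T")


def call_in_thread(
    fn: Callable[..., T],
    *args: Any,
    **kwargs: Any,  # ruff-format adds this trailing comma; the prior output omitted it
) -> T:
    # Trivial body so the sketch runs as-is.
    return fn(*args, **kwargs)


print(call_in_thread(max, 1, 2, 3))  # prints 3
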
@@ -98,7 +98,7 @@ def create_v2_schema( name_: str, model_cfg: t.Optional[ConfigDict] = None, model_base: t.Optional[t.Type[V2BaseModel]] = None, - **model_fields + **model_fields, ): """ Create a pydantic v2 model and craft a v1 compatible schema from it. diff --git a/src/prefect/_vendor/fastapi/applications.py b/src/prefect/_vendor/fastapi/applications.py index 87e258d07dd3..e7c6856388a9 100644 --- a/src/prefect/_vendor/fastapi/applications.py +++ b/src/prefect/_vendor/fastapi/applications.py @@ -150,7 +150,7 @@ def __init__( ) self.exception_handlers: Dict[ Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]] - ] = ({} if exception_handlers is None else dict(exception_handlers)) + ] = {} if exception_handlers is None else dict(exception_handlers) self.exception_handlers.setdefault(HTTPException, http_exception_handler) self.exception_handlers.setdefault( RequestValidationError, request_validation_exception_handler diff --git a/src/prefect/_vendor/fastapi/dependencies/utils.py b/src/prefect/_vendor/fastapi/dependencies/utils.py index 67b96eafab6e..11c3c3d5ac94 100644 --- a/src/prefect/_vendor/fastapi/dependencies/utils.py +++ b/src/prefect/_vendor/fastapi/dependencies/utils.py @@ -803,7 +803,7 @@ async def request_body_to_args( results: List[Union[bytes, str]] = [] async def process_fn( - fn: Callable[[], Coroutine[Any, Any, Any]] + fn: Callable[[], Coroutine[Any, Any, Any]], ) -> None: result = await fn() results.append(result) # noqa: B023 diff --git a/src/prefect/_vendor/fastapi/encoders.py b/src/prefect/_vendor/fastapi/encoders.py index c24d7c6ca546..a07240356688 100644 --- a/src/prefect/_vendor/fastapi/encoders.py +++ b/src/prefect/_vendor/fastapi/encoders.py @@ -19,7 +19,7 @@ def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]] + type_encoder_map: Dict[Any, Callable[[Any], Any]], ) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( tuple diff --git a/src/prefect/_vendor/fastapi/routing.py b/src/prefect/_vendor/fastapi/routing.py index 093c3abd50f5..6ae0f5372886 100644 --- a/src/prefect/_vendor/fastapi/routing.py +++ b/src/prefect/_vendor/fastapi/routing.py @@ -408,9 +408,9 @@ def __init__( methods = ["GET"] self.methods: Set[str] = {method.upper() for method in methods} if isinstance(generate_unique_id_function, DefaultPlaceholder): - current_generate_unique_id: Callable[["APIRoute"], str] = ( - generate_unique_id_function.value - ) + current_generate_unique_id: Callable[ + ["APIRoute"], str + ] = generate_unique_id_function.value else: current_generate_unique_id = generate_unique_id_function self.unique_id = self.operation_id or current_generate_unique_id(self) @@ -433,9 +433,9 @@ def __init__( # would pass the validation and be returned as is. # By being a new field, no inheritance will be passed as is. A new model # will be always created. 
- self.secure_cloned_response_field: Optional[ModelField] = ( - create_cloned_field(self.response_field) - ) + self.secure_cloned_response_field: Optional[ + ModelField + ] = create_cloned_field(self.response_field) else: self.response_field = None # type: ignore self.secure_cloned_response_field = None diff --git a/src/prefect/_vendor/fastapi/utils.py b/src/prefect/_vendor/fastapi/utils.py index 4198b6543392..53f8ca6ba88a 100644 --- a/src/prefect/_vendor/fastapi/utils.py +++ b/src/prefect/_vendor/fastapi/utils.py @@ -39,9 +39,9 @@ from .routing import APIRoute # Cache for `create_cloned_field` -_CLONED_TYPES_CACHE: MutableMapping[Type[BaseModel], Type[BaseModel]] = ( - WeakKeyDictionary() -) +_CLONED_TYPES_CACHE: MutableMapping[ + Type[BaseModel], Type[BaseModel] +] = WeakKeyDictionary() def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: diff --git a/src/prefect/_vendor/starlette/authentication.py b/src/prefect/_vendor/starlette/authentication.py index 965020a11883..2ea04623cc0d 100644 --- a/src/prefect/_vendor/starlette/authentication.py +++ b/src/prefect/_vendor/starlette/authentication.py @@ -35,7 +35,7 @@ def requires( scopes_list = [scopes] if isinstance(scopes, str) else list(scopes) def decorator( - func: typing.Callable[_P, typing.Any] + func: typing.Callable[_P, typing.Any], ) -> typing.Callable[_P, typing.Any]: sig = inspect.signature(func) for idx, parameter in enumerate(sig.parameters.values()): diff --git a/src/prefect/_vendor/starlette/routing.py b/src/prefect/_vendor/starlette/routing.py index fad88f79aa74..1cbc4c948764 100644 --- a/src/prefect/_vendor/starlette/routing.py +++ b/src/prefect/_vendor/starlette/routing.py @@ -59,7 +59,9 @@ def iscoroutinefunction_or_partial(obj: typing.Any) -> bool: # pragma: no cover def request_response( - func: typing.Callable[[Request], typing.Union[typing.Awaitable[Response], Response]] + func: typing.Callable[ + [Request], typing.Union[typing.Awaitable[Response], Response] + ], ) -> ASGIApp: """ Takes a function or coroutine `func(request) -> response`, @@ -82,7 +84,7 @@ async def app(scope: Scope, receive: Receive, send: Send) -> None: def websocket_session( - func: typing.Callable[[WebSocket], typing.Awaitable[None]] + func: typing.Callable[[WebSocket], typing.Awaitable[None]], ) -> ASGIApp: """ Takes a coroutine `func(session)`, and returns an ASGI application. diff --git a/src/prefect/blocks/core.py b/src/prefect/blocks/core.py index 36e4229790d9..07895426b634 100644 --- a/src/prefect/blocks/core.py +++ b/src/prefect/blocks/core.py @@ -257,9 +257,9 @@ def schema_extra(schema: Dict[str, Any], model: Type["Block"]): type_._to_block_schema_reference_dict(), ] else: - refs[field.name] = ( - type_._to_block_schema_reference_dict() - ) + refs[ + field.name + ] = type_._to_block_schema_reference_dict() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/src/prefect/blocks/notifications.py b/src/prefect/blocks/notifications.py index 6d0c59714112..f86fd7911a99 100644 --- a/src/prefect/blocks/notifications.py +++ b/src/prefect/blocks/notifications.py @@ -24,14 +24,14 @@ class AbstractAppriseNotificationBlock(NotificationBlock, ABC): An abstract class for sending notifications using Apprise. """ - notify_type: Literal["prefect_default", "info", "success", "warning", "failure"] = ( - Field( - default=PREFECT_NOTIFY_TYPE_DEFAULT, - description=( - "The type of notification being performed; the prefect_default " - "is a plain notification that does not attach an image." 
- ), - ) + notify_type: Literal[ + "prefect_default", "info", "success", "warning", "failure" + ] = Field( + default=PREFECT_NOTIFY_TYPE_DEFAULT, + description=( + "The type of notification being performed; the prefect_default " + "is a plain notification that does not attach an image." + ), ) def __init__(self, *args, **kwargs): diff --git a/src/prefect/cli/_prompts.py b/src/prefect/cli/_prompts.py index 8500c73774d1..df1700ba92d3 100644 --- a/src/prefect/cli/_prompts.py +++ b/src/prefect/cli/_prompts.py @@ -487,14 +487,14 @@ async def prompt_push_custom_docker_image( import prefect_docker credentials_block = prefect_docker.DockerRegistryCredentials - push_step["credentials"] = ( - "{{ prefect_docker.docker-registry-credentials.docker_registry_creds_name }}" - ) + push_step[ + "credentials" + ] = "{{ prefect_docker.docker-registry-credentials.docker_registry_creds_name }}" else: credentials_block = DockerRegistry - push_step["credentials"] = ( - "{{ prefect.docker-registry.docker_registry_creds_name }}" - ) + push_step[ + "credentials" + ] = "{{ prefect.docker-registry.docker_registry_creds_name }}" docker_registry_creds_name = f"deployment-{slugify(deployment_config['name'])}-{slugify(deployment_config['work_pool']['name'])}-registry-creds" create_new_block = False try: diff --git a/src/prefect/cli/dev.py b/src/prefect/cli/dev.py index de1472c63146..893d9fee57d0 100644 --- a/src/prefect/cli/dev.py +++ b/src/prefect/cli/dev.py @@ -233,7 +233,8 @@ async def api( try: server_pid = await tg.start(start_command) async for _ in watchfiles.awatch( - prefect.__module_path__, stop_event=stop_event # type: ignore + prefect.__module_path__, + stop_event=stop_event, # type: ignore ): # when any watched files change, restart the server app.console.print("Restarting Prefect Server...") diff --git a/src/prefect/cli/flow_run.py b/src/prefect/cli/flow_run.py index af53ff70a2cb..a58868de01ae 100644 --- a/src/prefect/cli/flow_run.py +++ b/src/prefect/cli/flow_run.py @@ -268,7 +268,7 @@ async def logs( @flow_run_app.command() async def execute( - id: Optional[UUID] = typer.Argument(None, help="ID of the flow run to execute") + id: Optional[UUID] = typer.Argument(None, help="ID of the flow run to execute"), ): if id is None: environ_flow_id = os.environ.get("PREFECT__FLOW_RUN_ID") diff --git a/src/prefect/cli/profile.py b/src/prefect/cli/profile.py index f06dbfc7c307..c2065b74bb5d 100644 --- a/src/prefect/cli/profile.py +++ b/src/prefect/cli/profile.py @@ -225,7 +225,7 @@ def rename(name: str, new_name: str): def inspect( name: Optional[str] = typer.Argument( None, help="Name of profile to inspect; defaults to active profile." - ) + ), ): """ Display settings from a given profile; defaults to active. 
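
The blocks and CLI hunks above all apply one reflow: when an assignment (or annotated class attribute) is too long for a single line, the break moves inside the subscript or annotation brackets rather than wrapping the right-hand side in parentheses. A runnable before/after sketch with invented names:

settings: dict = {}


def get_secret_value() -> str:
    # Stand-in for the SecretStr accessors in the filesystems.py hunk below.
    return "example-secret"


# Shape the patch removes:
#     settings["connection_string"] = (
#         get_secret_value()
#     )
# Shape the patch adopts:
settings[
    "connection_string"
] = get_secret_value()

print(settings)  # {'connection_string': 'example-secret'}
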
diff --git a/src/prefect/context.py b/src/prefect/context.py index e70df4151314..1f755d69fd7b 100644 --- a/src/prefect/context.py +++ b/src/prefect/context.py @@ -137,9 +137,9 @@ class PrefectObjectRegistry(ContextModel): ) # Failures will be a tuple of (exception, instance, args, kwargs) - _instance_init_failures: Dict[Type[T], List[Tuple[Exception, T, Tuple, Dict]]] = ( - PrivateAttr(default_factory=lambda: defaultdict(list)) - ) + _instance_init_failures: Dict[ + Type[T], List[Tuple[Exception, T, Tuple, Dict]] + ] = PrivateAttr(default_factory=lambda: defaultdict(list)) block_code_execution: bool = False capture_failures: bool = False diff --git a/src/prefect/engine.py b/src/prefect/engine.py index 713cd6e85f5f..19ef5fd78b95 100644 --- a/src/prefect/engine.py +++ b/src/prefect/engine.py @@ -2500,8 +2500,7 @@ def resolve_input(expr, context): # incorrectly evaluate to false — to resolve this, we must track all # annotations wrapping the current expression but this is not yet # implemented. - isinstance(context.get("annotation"), allow_failure) - and state.is_failed() + isinstance(context.get("annotation"), allow_failure) and state.is_failed() ): raise UpstreamTaskError( f"Upstream task run '{state.state_details.task_run_id}' did not reach a" diff --git a/src/prefect/filesystems.py b/src/prefect/filesystems.py index 7f7eaaaf2972..a05e2d64383b 100644 --- a/src/prefect/filesystems.py +++ b/src/prefect/filesystems.py @@ -709,13 +709,13 @@ def basepath(self) -> str: def filesystem(self) -> RemoteFileSystem: settings = {} if self.azure_storage_connection_string: - settings["connection_string"] = ( - self.azure_storage_connection_string.get_secret_value() - ) + settings[ + "connection_string" + ] = self.azure_storage_connection_string.get_secret_value() if self.azure_storage_account_name: - settings["account_name"] = ( - self.azure_storage_account_name.get_secret_value() - ) + settings[ + "account_name" + ] = self.azure_storage_account_name.get_secret_value() if self.azure_storage_account_key: settings["account_key"] = self.azure_storage_account_key.get_secret_value() if self.azure_storage_tenant_id: @@ -723,9 +723,9 @@ def filesystem(self) -> RemoteFileSystem: if self.azure_storage_client_id: settings["client_id"] = self.azure_storage_client_id.get_secret_value() if self.azure_storage_client_secret: - settings["client_secret"] = ( - self.azure_storage_client_secret.get_secret_value() - ) + settings[ + "client_secret" + ] = self.azure_storage_client_secret.get_secret_value() settings["anon"] = self.azure_storage_anon self._remote_file_system = RemoteFileSystem( basepath=self.basepath, settings=settings diff --git a/src/prefect/futures.py b/src/prefect/futures.py index 09d5d67af15d..dc81b54c9b2f 100644 --- a/src/prefect/futures.py +++ b/src/prefect/futures.py @@ -360,7 +360,7 @@ def replace_futures_with_results(expr, context): async def resolve_futures_to_states( - expr: Union[PrefectFuture[R, Any], Any] + expr: Union[PrefectFuture[R, Any], Any], ) -> Union[State[R], Any]: """ Given a Python built-in collection, recursively find `PrefectFutures` and build a diff --git a/src/prefect/infrastructure/container.py b/src/prefect/infrastructure/container.py index 82541eda929f..0f0e23e4d0bf 100644 --- a/src/prefect/infrastructure/container.py +++ b/src/prefect/infrastructure/container.py @@ -398,9 +398,9 @@ async def generate_work_pool_base_job_template(self): return await super().generate_work_pool_base_job_template() for key, value in self.dict(exclude_unset=True, exclude_defaults=True).items(): if 
key == "command": - base_job_template["variables"]["properties"]["command"]["default"] = ( - shlex.join(value) - ) + base_job_template["variables"]["properties"]["command"][ + "default" + ] = shlex.join(value) elif key == "image_registry": self.logger.warning( "Image registry blocks are not supported by Docker" diff --git a/src/prefect/infrastructure/kubernetes.py b/src/prefect/infrastructure/kubernetes.py index a4c89729fa9d..45418134507c 100644 --- a/src/prefect/infrastructure/kubernetes.py +++ b/src/prefect/infrastructure/kubernetes.py @@ -376,9 +376,9 @@ async def generate_work_pool_base_job_template(self): ), "Failed to retrieve default base job template." for key, value in self.dict(exclude_unset=True, exclude_defaults=True).items(): if key == "command": - base_job_template["variables"]["properties"]["command"]["default"] = ( - shlex.join(value) - ) + base_job_template["variables"]["properties"]["command"][ + "default" + ] = shlex.join(value) elif key in [ "type", "block_type_slug", @@ -892,9 +892,7 @@ def _slugify_label_key(self, key: str) -> str: prefix, max_length=253, regex_pattern=r"[^a-zA-Z0-9-\.]+", - ).strip( - "_-." - ) # Must start or end with alphanumeric characters + ).strip("_-.") # Must start or end with alphanumeric characters or prefix ) diff --git a/src/prefect/infrastructure/process.py b/src/prefect/infrastructure/process.py index 3b75bd5c76e5..c600778ba7cb 100644 --- a/src/prefect/infrastructure/process.py +++ b/src/prefect/infrastructure/process.py @@ -264,9 +264,9 @@ async def generate_work_pool_base_job_template(self): ), "Failed to generate default base job template for Process worker." for key, value in self.dict(exclude_unset=True, exclude_defaults=True).items(): if key == "command": - base_job_template["variables"]["properties"]["command"]["default"] = ( - shlex.join(value) - ) + base_job_template["variables"]["properties"]["command"][ + "default" + ] = shlex.join(value) elif key in [ "type", "block_type_slug", diff --git a/src/prefect/input/run_input.py b/src/prefect/input/run_input.py index 3085c1f4a79b..55129059d8ed 100644 --- a/src/prefect/input/run_input.py +++ b/src/prefect/input/run_input.py @@ -379,7 +379,7 @@ def receive(cls, *args, **kwargs): def run_input_subclass_from_type( - _type: Union[Type[R], Type[T], pydantic.BaseModel] + _type: Union[Type[R], Type[T], pydantic.BaseModel], ) -> Union[Type[AutomaticRunInput[T]], Type[R]]: """ Create a new `RunInput` subclass from the given type. 
diff --git a/src/prefect/logging/formatters.py b/src/prefect/logging/formatters.py index 3aa7f54b60cc..a846d6d5373e 100644 --- a/src/prefect/logging/formatters.py +++ b/src/prefect/logging/formatters.py @@ -80,7 +80,7 @@ def __init__( *, defaults=None, task_run_fmt: str = None, - flow_run_fmt: str = None + flow_run_fmt: str = None, ) -> None: """ Implementation of the standard Python formatter with support for multiple diff --git a/src/prefect/runner/server.py b/src/prefect/runner/server.py index 91e6c3240695..1e76e2a2ec86 100644 --- a/src/prefect/runner/server.py +++ b/src/prefect/runner/server.py @@ -139,9 +139,9 @@ async def get_deployment_router( ) # Used for updating the route schemas later on - schemas[f"{deployment.name}-{deployment_id}"] = ( - deployment.parameter_openapi_schema - ) + schemas[ + f"{deployment.name}-{deployment_id}" + ] = deployment.parameter_openapi_schema schemas[deployment_id] = deployment.name return router, schemas diff --git a/src/prefect/server/api/dependencies.py b/src/prefect/server/api/dependencies.py index 25e4e898cc03..aa197adda797 100644 --- a/src/prefect/server/api/dependencies.py +++ b/src/prefect/server/api/dependencies.py @@ -99,7 +99,7 @@ def get_limit( limit: int = Body( None, description="Defaults to PREFECT_API_DEFAULT_LIMIT if not provided.", - ) + ), ): default_limit = PREFECT_API_DEFAULT_LIMIT.value() limit = limit if limit is not None else default_limit diff --git a/src/prefect/server/api/deployments.py b/src/prefect/server/api/deployments.py index 5dfc506033fb..69b8b8e9e80d 100644 --- a/src/prefect/server/api/deployments.py +++ b/src/prefect/server/api/deployments.py @@ -67,22 +67,22 @@ async def create_deployment( if deployment.work_pool_name and deployment.work_queue_name: # If a specific pool name/queue name combination was provided, get the # ID for that work pool queue. - deployment_dict["work_queue_id"] = ( - await worker_lookups._get_work_queue_id_from_name( - session=session, - work_pool_name=deployment.work_pool_name, - work_queue_name=deployment.work_queue_name, - create_queue_if_not_found=True, - ) + deployment_dict[ + "work_queue_id" + ] = await worker_lookups._get_work_queue_id_from_name( + session=session, + work_pool_name=deployment.work_pool_name, + work_queue_name=deployment.work_queue_name, + create_queue_if_not_found=True, ) elif deployment.work_pool_name: # If just a pool name was provided, get the ID for its default # work pool queue. 
- deployment_dict["work_queue_id"] = ( - await worker_lookups._get_default_work_queue_id_from_work_pool_name( - session=session, - work_pool_name=deployment.work_pool_name, - ) + deployment_dict[ + "work_queue_id" + ] = await worker_lookups._get_default_work_queue_id_from_work_pool_name( + session=session, + work_pool_name=deployment.work_pool_name, ) elif deployment.work_queue_name: # If just a queue name was provided, ensure that the queue exists and diff --git a/src/prefect/server/database/orm_models.py b/src/prefect/server/database/orm_models.py index 2ae491f1f9e3..b809ff1af5a8 100644 --- a/src/prefect/server/database/orm_models.py +++ b/src/prefect/server/database/orm_models.py @@ -435,7 +435,8 @@ def estimated_run_time(cls): ) # add a correlate statement so this can reuse the `FROM` clause # of any parent query - .correlate(cls).label("estimated_run_time") + .correlate(cls) + .label("estimated_run_time") ) @hybrid_property diff --git a/src/prefect/server/database/query_components.py b/src/prefect/server/database/query_components.py index 0768806a021f..f5676f9ed9dc 100644 --- a/src/prefect/server/database/query_components.py +++ b/src/prefect/server/database/query_components.py @@ -366,7 +366,8 @@ async def get_scheduled_flow_runs_from_work_pool( sa.column("run_work_pool_id"), sa.column("run_work_queue_id"), db.FlowRun, - ).from_statement(query) + ) + .from_statement(query) # indicate that the state relationship isn't being loaded .options(sa.orm.noload(db.FlowRun.state)) ) @@ -1255,7 +1256,7 @@ async def flow_run_graph_v2( # help smooth over those differences. def edges( - value: Union[str, Sequence[UUID], Sequence[str], None] + value: Union[str, Sequence[UUID], Sequence[str], None], ) -> List[UUID]: if not value: return [] @@ -1264,7 +1265,7 @@ def edges( return [Edge(id=id) for id in value] def time( - value: Union[str, datetime.datetime, None] + value: Union[str, datetime.datetime, None], ) -> Optional[pendulum.DateTime]: if not value: return None diff --git a/src/prefect/server/models/agents.py b/src/prefect/server/models/agents.py index ae86fe0e545a..78bcf198407a 100644 --- a/src/prefect/server/models/agents.py +++ b/src/prefect/server/models/agents.py @@ -107,7 +107,8 @@ async def update_agent( """ update_stmt = ( - sa.update(db.Agent).where(db.Agent.id == agent_id) + sa.update(db.Agent) + .where(db.Agent.id == agent_id) # exclude_unset=True allows us to only update values provided by # the user, ignoring any defaults on the model .values(**agent.dict(shallow=True, exclude_unset=True)) diff --git a/src/prefect/server/models/block_schemas.py b/src/prefect/server/models/block_schemas.py index febeb17fcc85..f807f40505c6 100644 --- a/src/prefect/server/models/block_schemas.py +++ b/src/prefect/server/models/block_schemas.py @@ -75,10 +75,10 @@ async def create_block_schema( insert_values["fields"], definitions ) if non_block_definitions: - insert_values["fields"]["definitions"] = ( - _get_non_block_reference_definitions( - insert_values["fields"], definitions - ) + insert_values["fields"][ + "definitions" + ] = _get_non_block_reference_definitions( + insert_values["fields"], definitions ) else: # Prevent storing definitions for blocks. Those are reconstructed on read. 
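
The orm_models, query_components, and agents hunks above — and the flow_runs/flows/task_runs hunks below — all reflow fluent SQLAlchemy chains: once a comment interrupts the chain, each chained call moves to its own line instead of sharing one with the builder. A sketch using a tiny invented builder in place of SQLAlchemy so it runs without dependencies:

class Update:
    # Minimal invented stand-in for sa.update(); not SQLAlchemy.
    def __init__(self, table: str) -> None:
        self.parts = [f"UPDATE {table}"]

    def where(self, clause: str) -> "Update":
        self.parts.append(f"WHERE {clause}")
        return self

    def values(self, **kwargs: object) -> "Update":
        sets = ", ".join(f"{k} = {v!r}" for k, v in kwargs.items())
        self.parts.append(f"SET {sets}")
        return self


update_stmt = (
    Update("agent")
    .where("id = :id")
    # a comment inside the chain forces one call per line
    .values(name="renamed-agent")
)
print(" | ".join(update_stmt.parts))
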
@@ -393,7 +393,7 @@ def _construct_full_block_schema( def _find_root_block_schema( block_schemas_with_references: List[ Tuple[BlockSchema, Optional[str], Optional[UUID]] - ] + ], ): """ Attempts to find the root block schema from a list of block schemas diff --git a/src/prefect/server/models/flow_runs.py b/src/prefect/server/models/flow_runs.py index f854901e8b1c..48389299d665 100644 --- a/src/prefect/server/models/flow_runs.py +++ b/src/prefect/server/models/flow_runs.py @@ -133,7 +133,8 @@ async def update_flow_run( bool: whether or not matching rows were found to update """ update_stmt = ( - sa.update(db.FlowRun).where(db.FlowRun.id == flow_run_id) + sa.update(db.FlowRun) + .where(db.FlowRun.id == flow_run_id) # exclude_unset=True allows us to only update values provided by # the user, ignoring any defaults on the model .values(**flow_run.dict(shallow=True, exclude_unset=True)) diff --git a/src/prefect/server/models/flows.py b/src/prefect/server/models/flows.py index 13604722b4e9..f6d411c5000f 100644 --- a/src/prefect/server/models/flows.py +++ b/src/prefect/server/models/flows.py @@ -71,7 +71,8 @@ async def update_flow( bool: whether or not matching rows were found to update """ update_stmt = ( - sa.update(db.Flow).where(db.Flow.id == flow_id) + sa.update(db.Flow) + .where(db.Flow.id == flow_id) # exclude_unset=True allows us to only update values provided by # the user, ignoring any defaults on the model .values(**flow.dict(shallow=True, exclude_unset=True)) diff --git a/src/prefect/server/models/task_runs.py b/src/prefect/server/models/task_runs.py index 1c9128e1b41e..ea92f5c40eab 100644 --- a/src/prefect/server/models/task_runs.py +++ b/src/prefect/server/models/task_runs.py @@ -139,7 +139,8 @@ async def update_task_run( bool: whether or not matching rows were found to update """ update_stmt = ( - sa.update(db.TaskRun).where(db.TaskRun.id == task_run_id) + sa.update(db.TaskRun) + .where(db.TaskRun.id == task_run_id) # exclude_unset=True allows us to only update values provided by # the user, ignoring any defaults on the model .values(**task_run.dict(shallow=True, exclude_unset=True)) diff --git a/src/prefect/server/orchestration/dependencies.py b/src/prefect/server/orchestration/dependencies.py index b46f889410ef..a2d21e458507 100644 --- a/src/prefect/server/orchestration/dependencies.py +++ b/src/prefect/server/orchestration/dependencies.py @@ -93,14 +93,14 @@ async def parameter_lambda(): return tmp_orchestration_parameters try: - ORCHESTRATION_DEPENDENCIES["task_orchestration_parameters_provider"] = ( - parameter_lambda - ) + ORCHESTRATION_DEPENDENCIES[ + "task_orchestration_parameters_provider" + ] = parameter_lambda yield finally: - ORCHESTRATION_DEPENDENCIES["task_orchestration_parameters_provider"] = ( - starting_task_orchestration_parameters - ) + ORCHESTRATION_DEPENDENCIES[ + "task_orchestration_parameters_provider" + ] = starting_task_orchestration_parameters @contextmanager @@ -113,11 +113,11 @@ async def parameter_lambda(): return tmp_orchestration_parameters try: - ORCHESTRATION_DEPENDENCIES["flow_orchestration_parameters_provider"] = ( - parameter_lambda - ) + ORCHESTRATION_DEPENDENCIES[ + "flow_orchestration_parameters_provider" + ] = parameter_lambda yield finally: - ORCHESTRATION_DEPENDENCIES["flow_orchestration_parameters_provider"] = ( - starting_flow_orchestration_parameters - ) + ORCHESTRATION_DEPENDENCIES[ + "flow_orchestration_parameters_provider" + ] = starting_flow_orchestration_parameters diff --git a/src/prefect/server/utilities/database.py 
b/src/prefect/server/utilities/database.py index 9076d823357c..1b2ba6acf230 100644 --- a/src/prefect/server/utilities/database.py +++ b/src/prefect/server/utilities/database.py @@ -322,8 +322,7 @@ def _date_add_sqlite(element, compiler, **kwargs): sa.func.julianday(dt) + ( # convert interval to fractional days after the epoch - sa.func.julianday(interval) - - 2440587.5 + sa.func.julianday(interval) - 2440587.5 ), ) ) @@ -426,7 +425,8 @@ def _date_diff_sqlite(element, compiler, **kwargs): # the epoch in julian days 2440587.5 # plus the date difference in julian days - + sa.func.julianday(d1) - sa.func.julianday(d2), + + sa.func.julianday(d1) + - sa.func.julianday(d2), ) ) diff --git a/src/prefect/settings.py b/src/prefect/settings.py index a2e346e876ed..bb1280c12dc4 100644 --- a/src/prefect/settings.py +++ b/src/prefect/settings.py @@ -420,9 +420,7 @@ def warn_on_misconfigured_api_url(values): ) if warnings_list: - example = ( - 'e.g. PREFECT_API_URL="https://api.prefect.cloud/api/accounts/[ACCOUNT-ID]/workspaces/[WORKSPACE-ID]"' - ) + example = 'e.g. PREFECT_API_URL="https://api.prefect.cloud/api/accounts/[ACCOUNT-ID]/workspaces/[WORKSPACE-ID]"' warnings_list.append(example) warnings.warn("\n".join(warnings_list), stacklevel=2) diff --git a/src/prefect/software/pip.py b/src/prefect/software/pip.py index 228c79c0f7b4..64f79866b942 100644 --- a/src/prefect/software/pip.py +++ b/src/prefect/software/pip.py @@ -68,7 +68,7 @@ def _is_editable_install(dist: "importlib_metadata.Distribution") -> bool: def _remove_distributions_required_by_others( - dists: Dict[str, "importlib_metadata.Distribution"] + dists: Dict[str, "importlib_metadata.Distribution"], ) -> Dict[str, "importlib_metadata.Distribution"]: # Collect all child requirements child_requirement_names = set() diff --git a/src/prefect/testing/standard_test_suites/task_runners.py b/src/prefect/testing/standard_test_suites/task_runners.py index 9db623550ab7..4e163045e106 100644 --- a/src/prefect/testing/standard_test_suites/task_runners.py +++ b/src/prefect/testing/standard_test_suites/task_runners.py @@ -121,16 +121,14 @@ def test_flow(): assert c.name == "NotReady" assert ( f"Upstream task run '{b.state_details.task_run_id}' did not reach a" - " 'COMPLETED' state" - in c.message + " 'COMPLETED' state" in c.message ) assert d.is_pending() assert d.name == "NotReady" assert ( f"Upstream task run '{c.state_details.task_run_id}' did not reach a" - " 'COMPLETED' state" - in d.message + " 'COMPLETED' state" in d.message ) def test_sync_tasks_run_sequentially_with_sequential_concurrency_type( diff --git a/src/prefect/utilities/asyncutils.py b/src/prefect/utilities/asyncutils.py index cbea60bea37f..0ee7a9ad654e 100644 --- a/src/prefect/utilities/asyncutils.py +++ b/src/prefect/utilities/asyncutils.py @@ -55,7 +55,7 @@ def get_thread_limiter(): def is_async_fn( - func: Union[Callable[P, R], Callable[P, Awaitable[R]]] + func: Union[Callable[P, R], Callable[P, Awaitable[R]]], ) -> TypeGuard[Callable[P, Awaitable[R]]]: """ Returns `True` if a function returns a coroutine. diff --git a/src/prefect/utilities/collections.py b/src/prefect/utilities/collections.py index 4ee5712739f3..4d92739ad56d 100644 --- a/src/prefect/utilities/collections.py +++ b/src/prefect/utilities/collections.py @@ -106,7 +106,7 @@ def dict_to_flatdict( def flatdict_to_dict( - dct: Dict[Tuple[KT, ...], VT] + dct: Dict[Tuple[KT, ...], VT], ) -> Dict[KT, Union[VT, Dict[KT, VT]]]: """Converts a flattened dictionary back to a nested dictionary. 
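
One reflow in the test suite that follows deserves a note. In test_cancellation.py and test_rules.py below, statements of the form `mock.assert_not_called(), "message"` get wrapped in an explicit parenthesized tuple; the new layout makes a pre-existing pitfall visible: this was never an assert statement, so the mock method still raises on failure, but the trailing string is an inert tuple element rather than a failure message. (Elsewhere the tests also pick up genuine lint fixes alongside formatting, e.g. `type(limit) == int` becoming `isinstance(limit, int)` in test_filesystem.py.) A runnable sketch — the message text comes from the tests, the rest is invented:

from unittest.mock import Mock

handler = Mock()

# Post-format shape from the tests below: a two-element tuple statement.
# The assert_not_called() call still raises if the mock was used, but the
# string never becomes a failure message.
(
    handler.assert_not_called(),
    "Alarm based handler should not be used",
)

# An assertion that actually attaches the message would read:
assert not handler.called, "Alarm based handler should not be used"
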
diff --git a/tests/_internal/concurrency/test_cancellation.py b/tests/_internal/concurrency/test_cancellation.py index f5c6b109cd35..f497cd40a442 100644 --- a/tests/_internal/concurrency/test_cancellation.py +++ b/tests/_internal/concurrency/test_cancellation.py @@ -407,7 +407,10 @@ def test_cancel_sync_nested_watchers_inner_cancelled(mock_alarm_signal_handler): assert inner_scope.cancelled() assert not completed - mock_alarm_signal_handler.assert_not_called(), "Alarm based handler should not be used" + ( + mock_alarm_signal_handler.assert_not_called(), + "Alarm based handler should not be used", + ) def test_cancel_sync_nested_watchers_outer_cancelled(mock_alarm_signal_handler): @@ -426,7 +429,10 @@ def test_cancel_sync_nested_watchers_outer_cancelled(mock_alarm_signal_handler): assert outer_scope.cancelled() assert not completed - mock_alarm_signal_handler.assert_not_called(), "Alarm based handler should not be used" + ( + mock_alarm_signal_handler.assert_not_called(), + "Alarm based handler should not be used", + ) @pytest.mark.timeout(method="thread") # alarm-based pytest-timeout will interfere diff --git a/tests/agent/test_agent_run_cancellation.py b/tests/agent/test_agent_run_cancellation.py index a9d4bc0f2962..b0889a35d8d2 100644 --- a/tests/agent/test_agent_run_cancellation.py +++ b/tests/agent/test_agent_run_cancellation.py @@ -310,8 +310,7 @@ async def test_agent_cancel_run_with_missing_infrastructure_pid( # Information broadcasted to user in logs and state message assert ( "does not have an infrastructure pid attached. Cancellation cannot be" - " guaranteed." - in caplog.text + " guaranteed." in caplog.text ) assert "missing infrastructure tracking information" in post_flow_run.state.message diff --git a/tests/agent/test_agent_run_submission.py b/tests/agent/test_agent_run_submission.py index 58b189d5dd38..9d30234cab4f 100644 --- a/tests/agent/test_agent_run_submission.py +++ b/tests/agent/test_agent_run_submission.py @@ -848,8 +848,7 @@ async def test_agent_crashes_flow_if_infrastructure_returns_nonzero_status_code( ) assert ( f"Reported flow run '{flow_run.id}' as crashed: " - "Flow run infrastructure exited with non-zero status code 9." - in caplog.text + "Flow run infrastructure exited with non-zero status code 9." in caplog.text ) state = (await prefect_client.read_flow_run(flow_run.id)).state diff --git a/tests/cli/test_deploy.py b/tests/cli/test_deploy.py index b8cc2fc2b37f..dfbc592e503a 100644 --- a/tests/cli/test_deploy.py +++ b/tests/cli/test_deploy.py @@ -794,8 +794,7 @@ async def test_deploy_does_not_prompt_storage_when_pull_step_exists( ), user_input=( # don't save the deployment configuration - "n" - + readchar.key.ENTER + "n" + readchar.key.ENTER ), expected_code=0, expected_output_does_not_contain=[ diff --git a/tests/cli/test_work_pool.py b/tests/cli/test_work_pool.py index 1c0b7ef255d8..337b7db7b95d 100644 --- a/tests/cli/test_work_pool.py +++ b/tests/cli/test_work_pool.py @@ -330,8 +330,7 @@ async def test_create_with_provision_infra_unsupported(self): assert res.exit_code == 0 assert ( "Automatic infrastructure provisioning is not supported for 'fake' work" - " pools." - in res.output + " pools." in res.output ) @pytest.mark.usefixtures("interactive_console", "mock_collection_registry") @@ -751,6 +750,5 @@ async def test_provision_infra_unsupported(self, push_work_pool): assert res.exit_code == 0 assert ( "Automatic infrastructure provisioning is not supported for" - " 'push-work-pool:push' work pools." - in res.output + " 'push-work-pool:push' work pools." 
in res.output ) diff --git a/tests/client/test_base_client.py b/tests/client/test_base_client.py index 6dac8ddc0eb4..f0fce14a601f 100644 --- a/tests/client/test_base_client.py +++ b/tests/client/test_base_client.py @@ -362,9 +362,7 @@ async def test_prefect_httpx_client_respects_retry_header_per_response( request=Request("a test request", "fake.url/fake/route"), ) for retry_after in [5, 0, 10, 2.0] - ] + [ - RESPONSE_200 - ] # Then succeed + ] + [RESPONSE_200] # Then succeed with mock_anyio_sleep.assert_sleeps_for(5 + 10 + 2): async with client: @@ -426,7 +424,8 @@ async def test_prefect_httpx_client_adds_jitter_with_exponential_backoff( ] with mock_anyio_sleep.assert_sleeps_for( - 2 + 4 + 8, extra_tolerance=0.2 * 14 # Add tolerance for jitter + 2 + 4 + 8, + extra_tolerance=0.2 * 14, # Add tolerance for jitter ): async with client: response = await client.post( diff --git a/tests/client/test_prefect_client.py b/tests/client/test_prefect_client.py index e4df941b1614..c23197a8e7f5 100644 --- a/tests/client/test_prefect_client.py +++ b/tests/client/test_prefect_client.py @@ -1163,10 +1163,10 @@ async def test_create_then_read_flow_run_notification_policy( message_template=message_template, ) - response: List[FlowRunNotificationPolicy] = ( - await prefect_client.read_flow_run_notification_policies( - FlowRunNotificationPolicyFilter(is_active={"eq_": True}), - ) + response: List[ + FlowRunNotificationPolicy + ] = await prefect_client.read_flow_run_notification_policies( + FlowRunNotificationPolicyFilter(is_active={"eq_": True}), ) assert len(response) == 1 @@ -1211,10 +1211,10 @@ async def test_create_then_update_flow_run_notification_policy( message_template=message_template, ) - response: List[FlowRunNotificationPolicy] = ( - await prefect_client.read_flow_run_notification_policies( - FlowRunNotificationPolicyFilter(is_active={"eq_": False}) - ) + response: List[ + FlowRunNotificationPolicy + ] = await prefect_client.read_flow_run_notification_policies( + FlowRunNotificationPolicyFilter(is_active={"eq_": False}) ) assert len(response) == 1 @@ -1242,10 +1242,10 @@ async def test_create_then_delete_flow_run_notification_policy( await prefect_client.delete_flow_run_notification_policy(notification_policy_id) - response: List[FlowRunNotificationPolicy] = ( - await prefect_client.read_flow_run_notification_policies( - FlowRunNotificationPolicyFilter(is_active={"eq_": True}), - ) + response: List[ + FlowRunNotificationPolicy + ] = await prefect_client.read_flow_run_notification_policies( + FlowRunNotificationPolicyFilter(is_active={"eq_": True}), ) assert len(response) == 0 diff --git a/tests/deployment/test_steps.py b/tests/deployment/test_steps.py index d7ed5ff66e9e..7be3a21e5a2e 100644 --- a/tests/deployment/test_steps.py +++ b/tests/deployment/test_steps.py @@ -802,8 +802,7 @@ async def test_pip_install_fails_on_error(self): assert ( "pip_install_requirements failed with error code 1: ERROR: Could not open " "requirements file: [Errno 2] No such file or directory: " - "'doesnt-exist.txt'" - in str(exc.value) + "'doesnt-exist.txt'" in str(exc.value) ) diff --git a/tests/infrastructure/provisioners/test_ecs.py b/tests/infrastructure/provisioners/test_ecs.py index c8b003201c59..860ee563763f 100644 --- a/tests/infrastructure/provisioners/test_ecs.py +++ b/tests/infrastructure/provisioners/test_ecs.py @@ -30,7 +30,7 @@ def start_mocking_aws(monkeypatch): monkeypatch.setenv( "MOTO_IAM_LOAD_MANAGED_POLICIES", "true" - ) # tell moto to explcitly load managed policies + ) # tell moto to explicitly load 
managed policies monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-1") with mock_aws(): yield @@ -436,8 +436,7 @@ async def test_get_planned_actions(self, authentication_resource): ) assert ( "Creating and attaching an IAM policy for managing ECS tasks:" - " [blue]prefect-ecs-policy[/]" - in actions + " [blue]prefect-ecs-policy[/]" in actions ) assert "Storing generated AWS credentials in a block" in actions diff --git a/tests/infrastructure/test_docker_container.py b/tests/infrastructure/test_docker_container.py index 6bbd54a4b728..d73a346cbb1f 100644 --- a/tests/infrastructure/test_docker_container.py +++ b/tests/infrastructure/test_docker_container.py @@ -864,8 +864,7 @@ def test_logs_when_unexpected_docker_error(caplog, mock_docker_client): assert ( "An unexpected Docker API error occurred while streaming output from container" - " fake-name." - in caplog.text + " fake-name." in caplog.text ) diff --git a/tests/runner/test_storage.py b/tests/runner/test_storage.py index 8a2dea82b48f..d9b9a3ca24de 100644 --- a/tests/runner/test_storage.py +++ b/tests/runner/test_storage.py @@ -208,8 +208,7 @@ def test_eq(self): def test_repr(self): repo = GitRepository(url="https://github.com/org/repo.git") assert ( - repr(repo) - == "GitRepository(name='repo'" + repr(repo) == "GitRepository(name='repo'" " repository='https://github.com/org/repo.git', branch=None)" ) diff --git a/tests/runner/test_webserver.py b/tests/runner/test_webserver.py index 28b9197e2e79..deae0668a357 100644 --- a/tests/runner/test_webserver.py +++ b/tests/runner/test_webserver.py @@ -251,8 +251,7 @@ def new_flow(): # we should have logged a warning assert ( "Flow new-flow is not directly managed by the runner. Please " - "include it in the runner's served flows' import namespace." - in caplog.text + "include it in the runner's served flows' import namespace." in caplog.text ) @mock.patch("prefect.runner.server.load_flow_from_entrypoint") diff --git a/tests/server/api/test_deployments.py b/tests/server/api/test_deployments.py index 635747a1a026..927adf961023 100644 --- a/tests/server/api/test_deployments.py +++ b/tests/server/api/test_deployments.py @@ -788,8 +788,7 @@ async def test_create_deployment_enforces_parameter_schema( assert response.status_code == 422 assert ( "Validation failed for field 'foo'. Failure reason: 1 is not of type" - " 'string'" - in response.text + " 'string'" in response.text ) async def test_create_deployment_does_not_enforce_schema_by_default( diff --git a/tests/server/api/test_workers.py b/tests/server/api/test_workers.py index fc7c4c37b712..79d46e68028c 100644 --- a/tests/server/api/test_workers.py +++ b/tests/server/api/test_workers.py @@ -152,8 +152,7 @@ async def test_create_work_pool_template_validation_missing_keys(self, client): assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY assert ( "The `base_job_template` must contain both a `job_configuration` key and a" - " `variables` key." - in response.json()["exception_detail"][0]["msg"] + " `variables` key." in response.json()["exception_detail"][0]["msg"] ) async def test_create_work_pool_template_validation_missing_variables(self, client): @@ -373,8 +372,7 @@ async def test_update_work_pool_template_validation_missing_keys( assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY assert ( "The `base_job_template` must contain both a `job_configuration` key and a" - " `variables` key." - in response.json()["exception_detail"][0]["msg"] + " `variables` key." 
in response.json()["exception_detail"][0]["msg"] ) async def test_update_work_pool_template_validation_missing_variables( diff --git a/tests/server/models/test_variables.py b/tests/server/models/test_variables.py index 9254179a996c..af3d659f1497 100644 --- a/tests/server/models/test_variables.py +++ b/tests/server/models/test_variables.py @@ -260,7 +260,9 @@ async def test_update_name( ): new_name = "another_name" updated = await update_variable( - session, variable.id, VariableUpdate(name=new_name) # type: ignore + session, + variable.id, + VariableUpdate(name=new_name), # type: ignore ) assert updated @@ -275,7 +277,9 @@ async def test_update_value( ): new_value = "another_name" updated = await update_variable( - session, variable.id, VariableUpdate(value=new_value) # type: ignore + session, + variable.id, + VariableUpdate(value=new_value), # type: ignore ) assert updated @@ -290,7 +294,9 @@ async def test_update_tags( ): new_tags = ["new-tag-123"] updated = await update_variable( - session, variable.id, VariableUpdate(tags=new_tags) # type: ignore + session, + variable.id, + VariableUpdate(tags=new_tags), # type: ignore ) assert updated diff --git a/tests/server/orchestration/test_rules.py b/tests/server/orchestration/test_rules.py index cf4cbed8836b..de2f7dbeebea 100644 --- a/tests/server/orchestration/test_rules.py +++ b/tests/server/orchestration/test_rules.py @@ -1570,7 +1570,10 @@ async def cleanup(self, initial_state, validated_state, context): before_transition_hook.assert_called_once() if proposed_state_type is not None: after_transition_hook.assert_not_called() - cleanup_hook.assert_called_once(), "Cleanup should be called when transition is aborted" + ( + cleanup_hook.assert_called_once(), + "Cleanup should be called when transition is aborted", + ) else: after_transition_hook.assert_called_once(), "Rule expected no transition" cleanup_hook.assert_not_called() @@ -1635,7 +1638,10 @@ async def cleanup(self, initial_state, validated_state, context): before_transition_hook.assert_called_once() if proposed_state_type is not None: after_transition_hook.assert_not_called() - cleanup_hook.assert_called_once(), "Cleanup should be called when transition is aborted" + ( + cleanup_hook.assert_called_once(), + "Cleanup should be called when transition is aborted", + ) else: after_transition_hook.assert_called_once(), "Rule expected no transition" cleanup_hook.assert_not_called() diff --git a/tests/test_context.py b/tests/test_context.py index 6817487dc114..c20368c3d2ee 100644 --- a/tests/test_context.py +++ b/tests/test_context.py @@ -361,8 +361,7 @@ def test_root_settings_context_missing_cli(self, monkeypatch, capsys): _, err = capsys.readouterr() assert ( "profile 'bar' set by command line argument not found. The default profile" - " will be used instead." - in err + " will be used instead." in err ) def test_root_settings_context_missing_environment_variables( @@ -375,8 +374,7 @@ def test_root_settings_context_missing_environment_variables( _, err = capsys.readouterr() assert ( "profile 'bar' set by environment variable not found. The default profile" - " will be used instead." - in err + " will be used instead." 
in err ) @pytest.mark.usefixtures("remove_existing_settings_context") diff --git a/tests/test_deployments.py b/tests/test_deployments.py index bd4131e6ecba..d290d48654de 100644 --- a/tests/test_deployments.py +++ b/tests/test_deployments.py @@ -856,14 +856,15 @@ async def test_running_a_deployment_blocks_until_termination_async( ) assert ( - await run_deployment( - f"{d.flow_name}/{d.name}", - timeout=2, - poll_interval=0, - ) - ).state.type == terminal_state, ( - "run_deployment does not exit on {terminal_state}" - ) + ( + await run_deployment( + f"{d.flow_name}/{d.name}", + timeout=2, + poll_interval=0, + ) + ).state.type + == terminal_state + ), "run_deployment does not exit on {terminal_state}" assert len(flow_polls.calls) == 3 async def test_run_deployment_with_ephemeral_api( diff --git a/tests/test_engine.py b/tests/test_engine.py index 992e47f3d974..97a263d8923a 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -2057,8 +2057,7 @@ def my_task(x): assert ( "Received non-final state 'Failed' when proposing final state 'Failed' and" - " will not attempt to run again..." - not in caplog.text + " will not attempt to run again..." not in caplog.text ) async def test_retry_condition_fn_retry_handler_returns_notfalse_retries( @@ -2112,8 +2111,7 @@ def my_task(x): assert ( "Received non-final state 'AwaitingRetry' when proposing final state" - " 'Failed' and will attempt to run again..." - in caplog.text + " 'Failed' and will attempt to run again..." in caplog.text ) async def test_proposes_unknown_result_if_state_is_completed_and_result_data_is_missing( @@ -2867,8 +2865,7 @@ async def test_handles_signature_mismatches( assert "Validation of flow parameters failed with error" in state.message assert ( "SignatureMismatchError: Function expects parameters ['dog', 'cat'] but was" - " provided with parameters ['puppy', 'kitty']" - in state.message + " provided with parameters ['puppy', 'kitty']" in state.message ) with pytest.raises(SignatureMismatchError): await state.result() @@ -2962,8 +2959,7 @@ async def test_handles_signature_mismatches( assert "Validation of flow parameters failed with error" in state.message assert ( "SignatureMismatchError: Function expects parameters ['dog', 'cat'] but was" - " provided with parameters ['puppy', 'kitty']" - in state.message + " provided with parameters ['puppy', 'kitty']" in state.message ) with pytest.raises(SignatureMismatchError): await state.result() @@ -3041,8 +3037,7 @@ async def test_handles_signature_mismatches( assert "Validation of flow parameters failed with error" in state.message assert ( "SignatureMismatchError: Function expects parameters ['dog', 'cat'] but was" - " provided with parameters ['puppy', 'kitty']" - in state.message + " provided with parameters ['puppy', 'kitty']" in state.message ) with pytest.raises(SignatureMismatchError): await state.result() @@ -3309,8 +3304,7 @@ def my_flow(): assert ( "Task runner 'MyTaskRunner' does not implement the" " `duplicate` method and will fail if used for concurrent execution of" - " the same flow." - in caplog.text + " the same flow." in caplog.text ) @@ -3339,8 +3333,7 @@ def parent(): assert ( "Task runner 'MyTaskRunner' does not implement the" " `duplicate` method and will fail if used for concurrent execution of" - " the same flow." - in caplog.text + " the same flow." 
in caplog.text ) diff --git a/tests/test_flows.py b/tests/test_flows.py index b0083ae68d7a..b0826c035266 100644 --- a/tests/test_flows.py +++ b/tests/test_flows.py @@ -2244,8 +2244,7 @@ def dog(): # Make sure that warning is raised assert ( "Script loading is in progress, flow 'dog' will not be executed. " - "Consider updating the script to only call the flow" - in caplog.text + "Consider updating the script to only call the flow" in caplog.text ) flow_runs = await prefect_client.read_flows() diff --git a/tests/test_plugins.py b/tests/test_plugins.py index af0905506f6e..5afe4c876c6a 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -95,8 +95,7 @@ def test_load_extra_entrypoints_unparsable_entrypoint(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to load extra entrypoint 'foo$bar': " - "AttributeError: 'NoneType' object has no attribute 'group'" - in stderr + "AttributeError: 'NoneType' object has no attribute 'group'" in stderr ) @@ -154,8 +153,7 @@ def test_load_extra_entrypoints_callable_that_raises(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to run callable entrypoint " - "'test_module_name:raises_value_error': ValueError: test" - in stderr + "'test_module_name:raises_value_error': ValueError: test" in stderr ) @@ -179,8 +177,7 @@ def test_load_extra_entrypoints_error_on_import(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to load extra entrypoint 'raising_module_name': " - "RuntimeError: test" - in stderr + "RuntimeError: test" in stderr ) @@ -198,8 +195,7 @@ def test_load_extra_entrypoints_missing_module(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to load extra entrypoint 'nonexistant_module': " - "ModuleNotFoundError" - in stderr + "ModuleNotFoundError" in stderr ) @@ -222,8 +218,7 @@ def test_load_extra_entrypoints_missing_submodule(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to load extra entrypoint 'test_module_name.missing_module': " - "ModuleNotFoundError" - in stderr + "ModuleNotFoundError" in stderr ) @@ -243,8 +238,7 @@ def test_load_extra_entrypoints_missing_attribute(capsys): _, stderr = capsys.readouterr() assert ( "Warning! Failed to load extra entrypoint 'test_module_name:missing_attr': " - "AttributeError" - in stderr + "AttributeError" in stderr ) diff --git a/tests/utilities/test_filesystem.py b/tests/utilities/test_filesystem.py index 8f4cc9223b9c..edd3c4af6648 100644 --- a/tests/utilities/test_filesystem.py +++ b/tests/utilities/test_filesystem.py @@ -167,7 +167,7 @@ def test_get_open_file_limit(): # The functions that check the open file limit on either Windows or Unix # have an 'Any' return type, so this assertion ensures any changes to the # function don't break its contract. - assert type(limit) == int + assert isinstance(limit, int) # It shouldn't be possible to have a negative open file limit. 
assert limit >= 0 diff --git a/tests/workers/test_base_worker.py b/tests/workers/test_base_worker.py index 36a2b02a3d9e..7e1e5832e6a2 100644 --- a/tests/workers/test_base_worker.py +++ b/tests/workers/test_base_worker.py @@ -1260,56 +1260,59 @@ async def run(self): async def verify_submitted_deployment(self, deployment): pass - assert WorkerImplWithCustomBaseJobConfiguration.get_default_base_job_template() == { - "job_configuration": { - "command": "{{ command }}", - "env": "{{ env }}", - "labels": "{{ labels }}", - "name": "{{ name }}", - "var1": "{{ var1 }}", - "var2": "{{ var2 }}", - }, - "variables": { - "properties": { - "command": { - "title": "Command", - "type": "string", - "description": ( - "The command to use when starting a flow run. " - "In most cases, this should be left blank and the command " - "will be automatically generated by the worker." - ), - }, - "env": { - "title": "Environment Variables", - "type": "object", - "additionalProperties": {"type": "string"}, - "description": ( - "Environment variables to set when starting a flow run." - ), - }, - "labels": { - "title": "Labels", - "type": "object", - "additionalProperties": {"type": "string"}, - "description": ( - "Labels applied to infrastructure created by a worker." - ), - }, - "name": { - "title": "Name", - "type": "string", - "description": ( - "Name given to infrastructure created by a worker." - ), + assert ( + WorkerImplWithCustomBaseJobConfiguration.get_default_base_job_template() + == { + "job_configuration": { + "command": "{{ command }}", + "env": "{{ env }}", + "labels": "{{ labels }}", + "name": "{{ name }}", + "var1": "{{ var1 }}", + "var2": "{{ var2 }}", + }, + "variables": { + "properties": { + "command": { + "title": "Command", + "type": "string", + "description": ( + "The command to use when starting a flow run. " + "In most cases, this should be left blank and the command " + "will be automatically generated by the worker." + ), + }, + "env": { + "title": "Environment Variables", + "type": "object", + "additionalProperties": {"type": "string"}, + "description": ( + "Environment variables to set when starting a flow run." + ), + }, + "labels": { + "title": "Labels", + "type": "object", + "additionalProperties": {"type": "string"}, + "description": ( + "Labels applied to infrastructure created by a worker." + ), + }, + "name": { + "title": "Name", + "type": "string", + "description": ( + "Name given to infrastructure created by a worker." + ), + }, + "var1": {"title": "Var1", "type": "string"}, + "var2": {"title": "Var2", "type": "integer", "default": 1}, }, - "var1": {"title": "Var1", "type": "string"}, - "var2": {"title": "Var2", "type": "integer", "default": 1}, + "required": ["var1"], + "type": "object", }, - "required": ["var1"], - "type": "object", - }, - } + } + ) class TestPrepareForFlowRun: @@ -1605,8 +1608,7 @@ async def test_worker_cancel_run_with_missing_infrastructure_pid( # Information broadcasted to user in logs and state message assert ( "does not have an infrastructure pid attached. Cancellation cannot be" - " guaranteed." - in caplog.text + " guaranteed." in caplog.text ) assert ( "missing infrastructure tracking information" in post_flow_run.state.message @@ -1868,8 +1870,7 @@ async def run(self, flow_run, configuration, task_status=None): assert ( f"Worker type {worker_type!r} does not support killing created" - " infrastructure." - in caplog.text + " infrastructure." in caplog.text ) assert "Cancellation cannot be guaranteed." 
in caplog.text diff --git a/versioneer.py b/versioneer.py index 6fce0a8cb653..987c46774df4 100644 --- a/versioneer.py +++ b/versioneer.py @@ -418,9 +418,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= return stdout, process.returncode -LONG_VERSION_PY[ - "git" -] = r''' +LONG_VERSION_PY["git"] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build